var/home/core/zuul-output/                      (directory)
var/home/core/zuul-output/logs/                 (directory)
var/home/core/zuul-output/logs/kubelet.log.gz   (gzip-compressed kubelet log; binary payload, not recoverable as text)
B2B.9̢2g7bPPZx$jKV]C`xE X* id䆻8f㉀)^Tp,e(B&fȁTt`bQ$lD H2E {Q:Rq(X<y]D\!0 *2#eeƤ1NsTR'4Tje2=),gL mH6\  BIs# KʈX݈͜xg !uV%YrhpqАx8';|~N=\_:A[l8Վ>z#X=;3Hv@i YpHQTI0BG]FPJaeã7֚$Xɀ,Q.-۷x&M%zRw҃!7v?#UΪt .>i028F&9sYǸV9kdon:px w/躥 Q>Ť8}TLќ?{f4ijw86Żxxab[,_?tc~#_gU]_? fݭyںآuf[,d=7BTɨޭޫ ^ d⡣yvӜ C%1C.]ֻrBJ2.$ re|b2,e)YpQ :ވz-0uG$s ooQzSoSgF^d꓾/3S2>& Io\:XΔ VH,&BF1:r;9Iwɩw9$1BA#9G<d7ĚwuW; ͺ)Rq8]f05ָI/ A2s^>BS;/ASU$F+7Q[陉YHe\V,1Et4sUd_؜Tc36: A^N;^W]/ȍ[My ڝ񃝽!3o̅m!JM\JwS+yck~ygۮWyp쫚JnEmouww"eߣ\Vvt*מ5W*{&]_vDrkfw(/ )NX@\]_sI1x('g?e8KC6:`OnO~.Yp.0sz֘vcsvBӸ-Le[F!E~9׋ѓӅ(>n\BCs" Gˢ ߋFnV1tƥhw\O]FFOw~ V_zQ}f~9{J_0G q9F`oDek͈zjr96%\'~Y`U1cbUrmjpz cbW %3hઘY[?p*VJ•jMJot{eٕ~e|v:~Mw$b/L|1u)?03/7N拢HIj?-?:N1#NzÌIVן>5`lGs;&kCbT _!Lk,$?**v,pU5npEVJ5•cjefhZ`zpU4m5•U\ZsDpUr9ScbCbW0BވDmsu:Y:F_Oy%qww'B>΢e`3IuNcփ<~wSt*D=*&.ATJ`CTIZiq |^@*Wu#,~%2NcNkN~Y?K?t&=/ԧDfyZ4͸E3nь[4͸E3};@Ӗw1*KXV9 }XiɤWʡl2l)[bl)_RVk4δ3-L;δ3-L;δ3C7zO}0kygZwEnygZޙwiygZޙwiygyeNY klygrlygrlygZޙwiygZޙwiygZޙwŴ9h;㫰"Meo7\NjjwMX$s׉:WB;`fʖ-S0 e ˿ͫ:*SQ31;JȵP<(t6I*PY"H{b;g@DAGQ{qYƔeGj,h%*Pڛ<-8|\ǫ5fkŒHOfi(W\n]Wφhf)zAX +.Y3D#INq  4dձtRY>% %zk uDo*iLd!281c9{.L}MAHHq@Fk8zgq&hHASKB]o9W8` 8ef7 &XఁA6dg<߯zX~HlږVSjvWbK Αm)D6 cMƅ:LBu1qsQD'lv>b9HؐKf5p['~Iy*F3ccD@J11ctLIy FYO)%EcJ\p6@( ͱ=ccL¾2{}ݷmgP|!waOGJp:eӑ&}BI^L᯾xtd7?h*Cx+l^H4*y]MDhVN\t]RMI;Ex/0 ?D#@zoH KZ5eC1lU'&tOb*#g*A?=@bAR);^GnE_ىYM|Q_Ar{'Hg%6۬]M>scO ;ű3^Șs\K4{EO!_+Er8C4dlF "P$Hoq ;ݸ]'>786{7!nU7£5{cgqO&H$OfO" ki4tIuK_o~lfly-^-sC.d'!ˠ~*Ŕ 8YKƳ\i&ދԛ{>KCa9"lKoWAe}[cF223|mꏱrǻ'}?dGP! 6M%cLy`<`ɩh ";bZ-9vD%ܰ'.;u№]ѨYdw{H_fW>;?xJF3ϖœ<V]qӶ{RpN1'k{RNu#vCUeHp8jFbYGb'W}O 7ugmuz]q-Iqb5_FTn}>( lv5Zᦆt˽?;!uCwGۯ?;ZqD#0TGˣX_C׾Vߢk,S>ښǬ Rkrm'z?OfAGgBpS6VMpB_!i΢f^ߢ/7R_^:BT6b&KY L$ Pv%}>&vX_+6C+A:ﴈx \R{XD8(9/F T۰>r1e5-:Bt4:*큷f' dx`hELM6ulLpz~H][~WGC5h`v3nڭiu~cv)Weј9QB(%KJX,!K^*A oDP0ɖLsq 9~l}az1?lh9sG;\%[~gIx[Dže 㰱gJQ!p Y)&b Z2IHs (V"< "]q 00ℳ "85 QFb !gLF)Jr2Zȝu*KdJD1k490:w pAj᪉s>!#/[^ń|:_V(wkZl^Rjx^yES`UxLڂ@uIZBcXvG|19b`2@Dxh<=|y6 DC9=8ZLx= 47L(MP(t`XN A+"Dx-G[,bD7 it')Ei·G[TLHOXHA\l8rmI p6_̧ݬ`EJ~E54o]܌&%o2S7ioQ \nhA8Yw:R:~P~9RLJYɷ2(3q,*-T& c"lc,m:e6a(]t^g>}#ƗX'M?O8q {o?NZVIVE+a$+h&nYd_gyx޼az=C # ]3t٦mg'] /ϋNt*a-P-6~}~]K4z_yp~.:XY{3='3}KSq>++ǭ{fـGػGR8hN={fB?==MrAϤ0ΡGrl8(@xxAyUjK6Ϊo\K˥᳈,sqQq!=W \fu46 suG6cw`f9'4߿ [pR(U#dt6h9c@mR! @8ҲT7{Q*VrC":;I?k^~ "*ҼBj&j2*ݪ}q18.a|٤k 6o2a׷9~tr;o5|S,T{nxiUTǹsZ 0gJ1t>3c+q9|H\!"q6@M`rG嬂E0@62Vg;2nf)/4X*c^󲍱fgv^rr9Id4a0}'ӯ "YTŤ0R)TdN3'/Sxfus: C&cRؔ&Q`K!P79afɺډ9kh+徠v5x jKV}ݤ,0&[$h"qEϑA)$J{n0kcCDY "+:I5,%Cđ0>dj!Tjl֨_Đ`DED^%Cč).ubVd EP%ŤᠥAN(r '6F$X.* D)4hR"KZrʈXMUGt>:iCqQUEbb:ʕZɚ Iq‡R,i$C6[:\| \<C] lˑ1v |{= hEoL _=],hA8Oj|ƱQCiH)iTBxR#f |H8@=S>K10)D$casT Q YLhUPY֥dizl9xK(#Pך*s&(X `dMks-. kF8l,a/1n, >3H=yf8Vj6HWdjz w94] D6}@_8zջ] xe_AYClv,@hT5%*=W4tGYS7» H7^]3vOmq]+u:% !TȒUQ&JNJ@d(h=->הP)9q@0Y@vR8pS|bG!8IHѸeJ™СЦXs\f㖏nhgl hQ6p6R*@ c`Z'㢈$4JkØ%vY! "0qÈ"xsob>>) FS^c4S,,3KqPdH745aI0`&jgIAL21jX9-嬖ߟ2`Ԝ+Ryj#d)DJX㉛@!gHPP"(FD.؋@I}.T-?91Tb:‰Py!$UQ*$zk@x)jZ~>OzZTt$-%!HY2kY0F1A+QT F8GѩDx@Nv@6p+O 'Q a IO&J"pAHD q\BVnGi*gƅ̭b,5kS1GENI*% k 8Vj"9J{('k0֩:8P(|d eh{x86&@gݱ'5'>䉂MpN`٤ y.3N}|jJS[ lSG&T2o>E3L_jh"fW QROΚ ,U ޒ7on_.0/~6v8uͺF_s1C?~q6V2@BA&~lc(ٽ} 2}3k-wJ,*C@KQycՄje7^T(5f7Pګ12.Z}+m4_;АH9GεP,Br#gBj76yէ7͇\JoR=lxR>.p~\^WZ,H='Pyro8[Hutg3Ge7jHVϊf?A<Zq.߰f߾Y;h|}[ۅ{ ~JVJ~;< |=[q?02 }Ave] . 'L`q;!f)wtv4TukiZ+Z'70y=ZM'yk?O\w[_xuWK\1藧˛=~K-xmyg-Uӕo/݌CM兩QEy;US.ʘ'N4 @[zSd6x*1+lN6eXeZָWAg|72|6>m(d|8= G*.>>϶ jgdp>8s$qs6<\a'STy2B99 (68 {qY`/d""@4&2ldNGS5; ,12+\e"y! 
Uc[vnK/?u믎Y+jr|ž[ix{̓ GzlhŜ8aj2}.2ؚqGa9ҡzs-yg@Ao =Me{{~Qr|v:#|[^< #YP#..7Ý#U=zF|&Xqz\Llq%'~F0|3!J/bM_]o1gK3*&D^J~I@ i,ߑe".d5h`@_>|ܒ3o\%5fvLg\\ľɏXz`2]jD v\_s"'_2т8<o,.EZG>} L R{*KLu\9=!ccY/in,je[~eK6dʭ-VL3`Nxr\œ*uO.S(=) q}uQW\u6*S JB TW0i F]ergjTj֫O g@("]er=uRyg0SE^2Oϓrɜۊ-&_BU:$\e!U@! K󂹚9AJzChR4'2kI` ZM"j)5<=f.[}[_筯yu:ouz9u:o}[_筯yu:o}[_筗[_[_筯y669с)l Lt@ތ\!N/6F<:tE,-6޴ [c!l!p_pnp3._RM/MҪU]C)~)6ДP2Rg-7RO O$]ԂO.4^dTYgV( mVwPzY _͟Ƶi A9!wqQ&[C!8ɵw[mKXiD{6/Gܡ6Aӛ2{~N:g~5[sV~|4׳m[qÙukmik fZ,֨eRT%+}}Hu758U G= o/*?e(k/v=X@±=ڎH+Y[#ꖽ+L` *I]dh%d[Ig=:ͭ0}+uVЭ0UVqWKj%a$*IHI͞N&@stBsY.[.`c IJH$BzF-FsY ݡ8Cvm<~TʢߤMe)c;|*SQd|>zDCj?k()6uH[f"2bh qAzB2|6J~rd4:W:JD $`!䉦 kQ)G)Bd*|u xPJpB&ƅh`*_S ᑚ@'H["P\#g?^/vWɧϵQև|n3]5!?^Wv^^ X^*_¼olz[UmfY>nXD$P7 S$gCI!SkhK+DބӁk5DA9D )T%M`'τF`Pj! "I$А#$H!h͙Ҙ9E <g>\1r+ KtH;[*/ *wZw`diN>1o8C=sCGkXt>H \!%FIМ2ڐp,"{SFLw[UOy &J)mq=ɷ)(HIq++"DWU5O͓Wz@q\<8{w\&yB΍qI+F>F, \ĽĹ /oֻb̶%CCz(* ~aGY/PY6gM8j<"9L4SQfHպ!8 R@`ֱ^RHZ]yjeiٮJwy6-m424@- fޱxϻmN7\OKQی OLR.DU-lוztmB;]cf 61 Fu杲`G2%W/ŗ˙[4mxM '`hFILx,,9,OyNIL @ =N,g:k+p@O3/;- IK+I&&B"\cFj<.|QQ~]q2pkTj^9ЧSƁi.!%\s; .켮-.=G)_zxDNO]Wq{ںڏOSWqT2խuu_WॗOU^{mJrP_=T޹'CIOx78R*Bx^`; gc@Lq-Ɨ/[BhcT21Q18A :THzt<ZS_a9-oaT]ڲ<`YtO/c#BPA&8nȠ8M qQH@{a,"5eC B HLϒBSC!ьKn&b䴨sݨj^t_;ܩ17:.Aj.po<@S7[㛇hQ"9N%F>HHjH 2ɥdz2ѐ{JmYN! n¯[4& m UOƛ-c`RVǠf*Қ95c9RLCu) u OٚddTf~_ ly_4h48\cPL2k?JX+VR2Lz`2YL'Ӑ=1QmrN*L@xmJg,E~&p1qǁZZz#]/ٻFUFd_ߐV}H6KetW6&}zo 6X 3Ɪ_UWW0"TQ6H hghD@ј3P=T;H2"C hE{ʚ,2%GQxf&hTGeۮ9aeς²`Dlm}l*iq^/{ؔ@Dqe<(3R D(c5ȐkHG(ZvcHhG$AKYθ༆(G=Z )g`ZZ"~:K|Mbu9b/M^F :?1|E@dT9D !p"^.B.vE:6ey(6;a+pR+{]QTk/-fJ8ե޼-χڨ%(g%h+KFk_rB wQx չCEt'5` B8(&&KE$IzV#Go@jy+jG#u!T\/1.5.`+,A6nurVr+/=a˷?r, Ǐ yn96}6Ыz0q3«q*ZǫWdt}8d^A%^p1m'7dZ,q$O'=q`zt]mA7h+XvpE ʸD|,!*6)q*0w\X,4}VޗB]jfRlң|!ߵ# _#޹+ A НATE%4KSOmH)Où!"OD$tN!7F+B#kEMwF)Wm/gX@e|*)bq1Z ⰘsYQ~BbozPw\>}GӫͯӰa#6߇ Ѐ!Y'QCjGtڊ3e5 x8#..#m7+7T$52J1 VG#$K3J1&*yvY⬯GIjŊcf,_Ȳ@ͮ9x ,GIV&I-!*Q+Yb> zg_({˱#"x͙U^jnI(rN ĥ bHQ4!wlCe4BOO863@MOMdA䣤I.{HSxrYtY?x8FϬ.xl~Q~.7~ՍOKϭғl57Sg1uZ Om{{ \uuzҜ j?ٴ4hc-׻<.g")*˭_6C$k6WEYֵB5{;ΥSx:%1RF<hIRzXJ2bht ̈́R{$MwxpV[8QȄQ]P`Hh;hkby|ozrdKT8͵oϳ!wϷKy1:=lx-woPΛ\g;YiM\[!yq"AP<>ɠYHS.}BN zi$Z #liȔЄ>Hx.Eu k#" <\Ee eQlOw20!/_Hr֟gF^+㟏I37x$!NpaKb_eQ\t9*~LJXLY3ȏ*~hnq{d}Qp@51E%vpENb㥻=OP^ SNlpU%o9Orp6'wxzWPN2ʏȢj,,ЧKwxo+[ 굽5mBGGK%TDs`_)rCc|hgF1P;c>]ԏg#Y~OnM//Đ91`.gr%'Q1 T'q'"! =!_7.aRmfy|@ q8`ŬGËD}.GǴl앑{ ׆jB3^y#x$w,puts7XR!=Ũ_6#J\֍ p9 w߾.wN('{|;8D4$K|ߎxBצY\%6G=uUm/G.Iʘʭ7 ÷GGog5]Кu☷P'a$n6QQT!}1iTB|坸ԥk_b# :G.=_/MDOd+7?G? 3S]T?߾Ó#oW񇛄S1K?vks-r 15݊ZAwL,8f;*9Y EE$hJV.zUJ9൷W9|3 U^*}D)کx!>z7Zד@k 0B\R!ll' xu *Õq`DPOrH RFS-rVۨ3obo;_3DP _49 8]L$`%Z҈? 
/!H)O,u)# 2WRֹY%A%5PĠu.ɑC@=Z#Bi)2.tǜf*' /L#66@uFN*7>찛<,pnDJ#z *+&A 4$eE)b:cױ޲_^*@[MA ^hvXShlR\ חBNR @DK"J-ZD)E^в a :JǨ85(5A/8e3Pz"urx.hCL$]T7A'-*ɭӮe@kFV_6 ʝ3teO#4{e{VQ{rˠ#& GH007\N%#S Jv.4|U&XW\7 hʥ^\qaH\!qet_U*S>ًW#~ɂ]s!?wӥ\NG[# chD ~Dx??ůS\&g8() "=@3Yq㖂Xf2rT˽apson\O=OCq p4 xgPNq?aT>`ވ+$E\!B讋+R.WL1+$XL"2w]\!F5+alU&XW\E\ejwi^\e*+Wp#IO|DX,x.cOnIJ6}#KTYlL&"+"22&* v)pUw*R3+8Y_yÑ@\inK~^;{L"ŷYk;Y|:N9 5Kh zms?Lo_Mr9$6Q}=>ۨ,h@X2&#&+k J~pG-cN`&q fޒQd^Q*^C.ȹܞz"Ȑ^WE\y1}yHWo\\'/#U\ \iO}NpER"-•D/)՗/H\c.&43{"){WkoWKʝ+-"i>շH)p쁪9kΟ;sνL\煫IeRv{Wpe:Z ׹ED$tzwן~zwܭ;w7j.gg#t0i:[jn 6цwBx%;\Ozqܤ^3')UڬMY_az{;,US:"-{5~|lo•.Нo Z*+߀l7y~jLU׫B$حbug۫/(lƻ~l>q>Y\h:3N;+&`ܿWq<$rh6lkw6J<*qէ~y+sK[Jx7M_ (q z@v9&r*kJ~:7sOĭ[-?Fdq9K6&csxQI0 ihaM\-h\Y?U,y!8G}ާyꗺeq24骿V> ]+W&VʯV~o'G[m2?/XY\Ojm~4Nst{,O>\J>6}7Bhodq09%2%nw61yd / {ĉ kɇGȷY]|b"sr\]'N Q3%F(Df\+dAS\3l=tMQX-E>xiRVxkROWF )i],#zʐc 5S0֨<7vg *|,} mSDLS*ܒ$(G@6u<8+ Tc<%hur&z/ dMg]gIvSD|gP\g27.syhZ=(63Y褔眨xv8V%Q)&ЀR\Q[ V "9#7Hler]M亵wM.J}8~osǻzv]b<vqX7>ZEc'JQ10Hƪ)&"D1 Q(; Iʌ` %Rc0$<2wQ*')d<71`R!Kb$FY$tpۨ5ŏR(ts9!X $Bp }76_mJ~WO6_2۷KD6a|@mɇY &aeø K930O7q [g@ y|%Eѥ3Gĭbe1I*IX J'䩂<0.v,uÙKV$34q nZ ))Wd@ -SLZ6x}5b}p-ZlCG P>^DjGv#_@X ėjxqՠ/A_4R:bx":(L2Qp R!i&\T ֕H!yDQ8/H_vtX.e!*{BZ|i^WН/m\wH1d$~}w<9͉EX-DWak>s n6zXfVxbdѤَ߼L9%o43o/Q}q5Xݰċm.Б"D 5cu }ؽ𬪱 /DA̳45& h Ax՝RKIao>b:s#'n+텪SZMa>%Xpro#)bTQ[g\7"c^s|B\vNDu-JBÚ Y"6$F_19xHI(Cܗjvr D?6C*ݩd6[e_oJm|m[R*TEJzO3dlfSQ&u$Z.QF7-&l'LѥhrST\INj@62Vg?2*հe2 O~0-q/헝x/4n<~U$*")[Gq%:b'7*Gez[SX<02CyIIR+Zب "$#`XC,hSA-q#Nb' jW#Q[ʨm.Ga("ZLH-& 1-O MtRǢnIJ"E 9Pd҄51D&("QL(NڹxXMx }Ajq,"򺈸ހ"n+uR1pP+QP[g@4YY 2Z34Pl$Yi. 胁Z&E҂$uCD`*#b5q#⧛Du>f%"TE1​;`1ġ&FdBC_@=0NPLD(@\{b VqbQO}jq,be<βBPF}x|ZvFў= n86{cُRBhNht;ϠFD@BP)0N5o12fwI欬g9Z;puZzA}_:hxELPc^y\ N!فB!-Y 2vp. 3ۿVKf82= :P)\f,G흱!rժGb'!N:/w"*a,K{/MfP &"sHH @ x;GUj6qd[Mv47&_O3"5"8;@Cֹy,q͌&P!19;?x[7"$,UsIѽ9S_D|H93F hWVsdE(X D=#UjrwT>{ z?/ kp4i<6ci&v瞴Ddd3%HZ_zZ)`8 VA@ uZK&q f6Y`9}#NCܛS4d`6_PqէٻFndW8i6Z>lߥ4t]?ӞIGUu۴ȍ68~B#qwvh')ZvM }0Qnv[ky)4֗Rޫy}npKhp*-> ^{?@gzPJ>ԛ^.ƅFӺuI7:] 2xr]D2:6uKGGo=x=A{Jܸ>0U 䊛5`PH$8ngt 6I*ŅG|B &2 Y1# \{(w[JYL1QL,UXB Kݞ_rY&es())AZy3  a"{>v :#g5"xYʌytEOwכ1fyq.>zNbILّh椉MC+dIX->1x NۉN..{t6o%\tӃ2ؖUs .{z·5Qq"X07 Ko ^r*#O _Lq0g |q u}O!K@XǐT !&Q2::>8d^fcj{~v1؍^9рIIͬ(NR,mz!j.A daMEM$j FdLfHYr0,8BM=fgV#j|6fɘݦ<۞ʁW}Pd!୲ K^U!Va Ɂ2C(uvlN'Gfs$TֻFas!TғPȺΖ錜|'r8,g{+_/HZQnbRݮ88žH-˦F0r,[&{QQZ( NB ryˑLAJKBĬ $MHJh2X\<]Ҭ視0gg[%HdlvgY56.]_{bKW#?02 %jGd|5H ݲԇK=*(rد/~j\|le鲞:h"&j'N*sM xGs:쐵.-%194Mb&I%Jc0KӺ[Bd١J{SG퍓p<"gS)YiVhA+Vw}p95*9z~/~]RTX~*lRvxW\5;H$N*t3hdd0H1m?MC e )&`X|22XH6HDdîwFΚζ )#)yB0N;Xnɔ:xgh$u(i,*JRlKJo3چPg3阘WƆI#&0%fz KhY!7>8RvGU  1f(K!2]KDp 5@֓bpL5BxI h8e%ilW߷-k/څNn*>{A8n)ieT}u azT7 '՛6sݓbDI»ê~}]u9O.ER\;hN+p-}x@τ LtwA:oB CL!8}p'>I2Ep#岚 5D0ײҴ;MW۵^ v^|5 3w};RX|8Ktpybsn_.<7 >:y]VQasLV:o`{.ӯt~#ž0mp)'$~ҸN<$Hei6.Ym,^kF{jj{*MӴ7Z$mgU~xJMuf2\gl TvGNԏ,*/# vG*/~214Y7\ ʝ%_vccY Ω&u6ǽT"B4?Sk 9Ж^:v=^DFgN.G'cTEH#Kt*:ˤ?]Ĝi 3:qcM6E+YZa s C:chfEtɥ$-8J|Po],5F'a9B!J"s-M2qLX}HpBPy3Zϊ/x5 .|L9~Lߓ)N}hyLϿ5:nu\8)ޢ2I3?@zS5=%*W A\1#kAëp4ς!YNFʼnGܒ!P{K&.%=xGt_FcRGg[6U9}d,Re"}nrmůnx:j=-@yik5=>ՅoHsE^^-Gɞ/h.aD\ŵ!g55Ḩv~}⌶tyQenڜWWl\u|SWo(؜s7އJX%'Q5~7oOXH$׭#{aD0BOhYޑb D+XVAlG1{=V*&6j\-Jf2RV>^6gWT sU1?p??o{n'~ܬ7|\H}Oowߟotw\7Oӎ'Zq)`$c# 5o~yЮi5.g=]u5]Ne⣗4Rc|mH/W_$%ckE+V8 Mb~>ON[4T&U|{br3v|Мw5|M²aŽu4(4>{l$!&Kʋ&{iU"⁰,w-p#pAr^mFRކΫc=[8$mqpraPjhb!]`)[ ғE␅JFDDáN':4ؐ}[#h'9n[xڝy0ηQ8d1y,QZk[n wneK]v<ܲP /0f%#uE/:/ꊨOz*T<1NS25ިBRWWZPiA]@urnas2$–aCyrzh+Og9QǞ ?xsi8[G޾z"t F8UJآ0`2yj+#ѬMh<^hH&F:ZnR%&hPk0z "Jξs6V'N9PE^ BqJhi .CtR,yY ("LDHS{j=RgW:,{DZϫErK?Fu?B U6flwPo pPiqwJjz4q2c|=l; r״ꢳKj>`uls ;݆_iu7/`C!ʠx %Lo`e,׺@_}[ !Fov==X]d$Z%s?v4$ 
VyqCP-FI+XbK(ʿsOsY7NMdLcDIT+!hXLh&h6t{쭹f uن:IBiS{4a݆]ʵJDI3:)JM DXQ]%EvCT 1x4P NR[dADs] 4;A*+Hf9 BD!҄}TD$}gL$$D@H` +A[Khw)Yrfw2{52Nn\?.0⛼^ʭ *3ںX=;3/'s!}_{^Q@vLF 儝ݐs8p&Wg)*3UlӠ9\B`Or>6PqK܏9{WK׿ލHݢ~mB7ŏ*Ap&gφVpyϦ['.pG5+?Դ͚g#v:{s"0[A$<sjp~1[[nvIN#~{ÿBZF_+GrHmðaU[YdO(Z? U̻hxu?_㘃tQ2~$Fm!T20$d\F8o'^V}zF{IǿzީVk:Uu6O|??/?~B|=8Bȩ=H0$"<~ah[jho>47bЊn|qu)\|ԂFjj>lq$s뛜>N&Ză ^F1sLz颦ꧧT Ay/\Z9LH;>XOZ2na_n4/|/FR`"0ɼ/eQ8Q p4S!T0yU->N hM?y`p(hdh/.C .CFY CS3v:Eu&0y!Nh(Kai ĝv=^rs֘'1M+pUt%cWIݑ`e[%uGr9j(URwBOJ*JURgm*{zK&-8q^/+R1čc(|n8햫sw~.JdL;|E9I\2 WA,"|YDZI,,q&1㞦)29K !eecq.]A$IJw%UI$% sL Hi OS#g3:D ]-i8" I^z.]9Unw![sA#ALN~G)`SD"`l&qo w'g\lv5OlI ٫7MYq|VZb[DDh79A)O)B*5`LYM_j'* ?L֟Uto۝zJqmHf"7:"LUF,DBFl2/7l)N1A1ӑk3DJ`,zD +TΒB8 OtaBGJTDMɇ&cw{uXMS"|Z5,xU# רmH!  R)yp)2g4^|N|o!w9%рACYVFL(f6rRi"'u2ߟ/,u*hYsy\&-6 !S. UE=%HZI}1?ɻ<'#.C m"j'PTڲbü;Y/PYFϻd.aq@80"")Є(r0"uc2q9:5^J?9#=Xޝ3I␥LtLjuW78y֭:IϽ4]&٥erud@u+n.*p2Yx҇TĤ\tYmn$MF?E!]AN]ϽO,~384S 7ΈG t-j2=؍:y[114$*3! WUipVﱤ-dtY%eS/䌔cn|0gatObG rwy<8K䤧[I=!%f u я2%exkn=mqSЌ')\j6p Ay,1ŁIhEXCQcU7͆ }.9L p35n4<0Ia8`8OgѲ@MtCg&n'MyL.]y 4c.v杲>+7Q¬-[hip_ugGDl x!R9fItun7ON½U9޼9T mt&Hۄ;=ů.bj.ɬ{gի4/JK,EH L,z/m{WFQqiR@clе,V`X *tA@YqeS"5EyᓗH0ǔ+L'W.(ȴK6q܍*{b ۢiNiZ!Zt$ ;,hAHN (e%FmVjwN䶩vl%Lx͟2$ރ`rNܺh+rnn/]OzJ+/|^d>o)X|}72O"ڵuSAV%/e_oSÆ)Z!cnCH/ Y%C eRؐg)c[c+xF#ӦXPWo)$ЗZiuJflF˚afgUƺ 3i9󊌷9';.fyY_\d65EbFeG}`JWdm^ِk**hbeֶ"xQ bզ.[HЩP-X{(LEd[WDmE;Mp݌;Kv`ox(4P, dh 8SĊZFavf1Ŝ"{Ȍ %595H"[Egu jr16ևȹ]N*ˢ*}шQՈZqЈ76.HFE=D- B`aK${BSd@PRL”=%+&3{Ҋ)NBV `kfܮ߿&~Չl?Of\rWo,wL9lI-g/ dPeC AT%F YB/>lwՇ>4wӇn[_2HwH)љh؛6Nf-rLZGT|d]>wZ P|w{]G^;z^6BhphCAPkV'st޻,GP6AhCJ_Odº2KOIR` yhUx 6JZ]ZVR_Afxw4b-lvM aNϯzu7ek#U'4c*1! T.:3E?$|zؿ^znھ3u7ܭN8tЊ!SR%Z'ee&:m0%mt \f$e,ؒ')$)r H钁i /^oFCN{l(ϕГܱM=x_]ϧ :}!z@% Kgv BF(ob>Ft6ABVa*O.-;ub+\+em&{rvX-f,L+aE]|:{.4FN ;) FIEQvJ+dF&ntBPAyr權 |D[Htc9kFΎrտ1[U\Qk K;y0>YFH6=J$lA+5 ִV 8h)_*#A*#I-k>j>PHP>o]@BH`[W)RmQMQ("iшhX +) ثB|xininBߎwpJC &o#d..ĘQ)H:5e30ɇTؽK0mSjW~{Yz {_$k 8i%>lP5PfWk0ZQA<ډUfrWh#eyo,z>5q/Y295d0$@B(Shم֘!o1$ʶBmnvuշkg?WFR@i2ugCo$z{Ղo{wɛUiu>Ok0w#Kfv<+Lq nk f صxQ[`5ft) cA*#~e: ]qSWgSoUbg'O,Rv%Oyᩨ}[ӳŽR<8xKj{>g߿\4g)B3e}[DUg/g5|=xrSO&|$|V$]'K?`ۯ]>ߋI^OW]Z^6nXj:-%~۸GF:;ZϧBg4Z_}0 <@&ͻ1tB:uBQ`tu}2[_pp ou ofG#/G4c8iiس2l J-ԁжK:WQ:٩8llF߃n*w}ΰgE7U&gKR( %Oq `dVI!:!^h:ۦW9X ]|Ȑ>im285Ie 6#gǶqk>ߧnn;{u|px3WƢ} %'=Z'H{&+b6De1$7xty|>ή(ThgAbfH9gtPeQnwwR:,dީ|/Tyr -⊽6=KX=k%$H);+ &%7+%l'H@6 a"G~? .&yev afߑtH+hRg/e4:qGPTĘߕ,Qe=R[AKl%C"55 =Zt1P#!EjG.ȥ3Vz QԨFۘ?FuOʎ,>?|'ߚn \̮8ruo~1z'o zd3$́pJ~ŕ(q^855˅p =;50%R uVrVq648Y8tsߞ-sU!ɘ5. 
kדkr$ɫA4W/sȦqyQ7t1z{%_ѿeA*ruM?˵Ѧsk%Ϧ}L˫GkCu.7uZ承.nv2Y哷g?~xq,?~_ώ_<-xR67 ދ e|4 CVCxۇZ>rw׵|-~,>vC#I^^~9&,jFO]pT&p&ts2-z~J| e]v|/@[Lw-S7G[ Y~N"d\oT53xOa?Gթqp>waJ#y b#&_R1C'05I@(CNeDaQɢ^h_I,|RFyFڻGa(_{3%ȒKV}FcB u"a1@dKSЗE AױÕCD].$2FfQ;3yF75l s;[Wo'bO7(rTm`o I#b/1Dm0)YM0zpe} AII"ʔ 6dg-rXDiSEHhŦ{)@]N9 ԫ*o$s)K\cRI%\`J|CѢ3Fɱ.`>'w!g?ej(eak% {2K*LjL..dldlJ5!CdnHS^ T{+yx@gj+.ΘW5J wt|ސ75Gs]W,O־dDlm ؝ғ7KuhXK{)ъl4+pMV١Ośdl^ vMo^Oݲ_Fs,ҬĭlYaq##2ٻIȸ{;0ȝGZ,Rt"T|o$gj{_a{L" 8e^ YFJ'+$XF=3 ]])^b Qk˰`:n$H7I=I=Ihdhz'e d2aϢi6aV.iH9wTLnk7J!t$0 B1wl&ΑrRw\~@q'Q7Ӛiu%aiZNViXө_oE_7Ƣڇ$`P테' 4 ;@ES۶@0 $VڬdG2L~l핆&;ZQ))(L$BFLǍumV-|1Wez~׎w=p>y"X݋XwM^׿U=]w[[F|WlzƇtSui>%W V{*D$':M:o_ :tXO5*&x̚"rh~ B3l* uDM|MoJ0zY{,)kybN;.PPvJ4Fm?`n8f0@JJ1a 4Lℋ^f5(̠U3xX ڐ"S$23CًN*["!dp 8ZuUHfNu!4fZ4lk%=[Ee$iOeEFPķ6L_àY`"Қ@k\RMw׼mksp+Z7u^qI?5\ٍiՂfלK_ Ta74]OxWuȷ_V?^~S/bIg[jT2?M\>4i=OW=yzWld(KV2<1-U;g;ogfYWߐ8u2`̡A>n%Ǯ_kO-|zlؠ݇foEl/ loPۣsu `EÃit "FU$l^8#-kеh-MK P/UI#DƾoPBצJ{(XcQERɻzϺuf.9)ԫOI mCĞky9S1螘H>juHe=B_ɇŴQ1OL73;=%z==_rs||ߧ_/p*?*xW`o:ξ l1+|`;reW@fqoحBЮz4ztT/D$ _R]"WE)b*]m %ڎBψ<_!* 1!ȼ^ԬtQ(Ƃ1d\ 7TS1y*]iV^AF2[UaF΍"!1MLHb̔{1Z}3>cK-qd1>qy`}!d#īo=\5fU\}0I\(e+ YbXGK)P!`?gVr7~0?Զb6M'"Vd5=_~zF]]9?Kڿl:/ۼ\_1M JqVBZ,k|wCr{mx՗,@d,s)Jʟ&?^-./k ?0#FֵPbG]1P<,Fb[ObuZ +Q} 0*oOx{~k5>^EEۼ.v:N0^lb.ׯyIgacò^FX>; |w^;Yqjc%播 fwN@;/xQՐnZnsx]ݿi]ϧ*&i!rϋY1ǹxf6~׮uh~*O/R <SOoHZXZF OLК <0g}|Ů\'s7ʈ<9mD#KFˏ;pRNS3J(y^jGxh$0NL̎5c1@($ R9^RLH $\~{䦙M{㑷_[n|(NN߷"@VdPG Q8M^)YPk)绤v igRiwo5ELhR /EJ؎%3rK~3h4M-hϞ|WEP ST qEY>Vd R/|I[TEqNj҃ax$$IZocDideaL@}Ot` kάRsKOBA1(wZ[ĥfHF hBN*Fi&i:g(@,J@m[#TɎ0UNGеP㜋:O}Vz2[͕4D5mx1knMΧUn0lҵT8SWhH)7c͠eveaiȬsM{]Z^zf~ȹfO5|ϣWiji|MMR/:AֱlWקlףCl}Q~,OTO4ڡ,.En}*qR`$ 3>Tb&SI%fjTbrO%0Da$ qZұDW9KRQ PD~@xTGՀq"0(h(Vci5PR{1Fc7.0!Զ|-Fzr* OI0X !Ar:>?M7Wƥb :5L98 |m𝟊ɧAk$|gF0tl^|a;W&Y (ךY +^lüZ=Cωh|OyRӛkA ]v2ne;c{9S)M".rJ9H^z,|~΋W_(j {>Dh,N^3*=; :mt& 4W P$KJ*slJ 4t^ripVP|UC;heIC%+Y΃nysuCuQY{ӝJOGǽT6%|2:LP'G\R!}e'$,P*.U&#Ӄ>PBZq:  ;h'4"g}?d >ǝi𻟶Z2Ǒ\ڕk;.48*('It"J"A 'ǁI#w*+D:υfB)jcDgpՀ;m< V;E!""eh XV:#g2.H/)#)S)# WRR,IghZb:HPDrd/q(~PTh:;!4X-XScN3XthX ?D:# nQyg̡n2YD牔yF `$@D.ʢN1X/c;$c3Юz~[~&m-~kr5 ɠ8qtB;_ qe: D\ІH1H~)2 4yIRzX@+i08›ƥy}Ô݌ 8:oY{T/ުSiM\[!y:HE x|Ac,8 8(1$M\(eȔ eyHzy RTq|6]C/;'IO¥h`mYt%WQCYT.-h(y& Q}drY)g52Nέ>(?ytiHӿϿ5oMdk*2~ʼn\OhYr2=5U ] NCGcq)̹IƖ=1ȉ)(sw$Dž{v2gd l/y6WY@J8j=!وrmjAqXP*?EؠTgߋs[)j^~[;8؟@c{~YW꟦ާ8~@;q*QuُXy_)FJV?y[=xu1=i>8\v̮!FCĜoa:sbnDp4"+!|rO%ukO麞ںa v,QhC*3g1[l'>g cZͽkX5yg2;>+|I,FjcbTs*yCyC`X?.O~&?՛G<s O8H !KSrm`N F/Gt4UACgf:]8<@Fn@E¨:-8-F$&Q1j~SMI^oɾ 1.O$i+{ne18_/=JZRtD˿u]&mNCe&tRjs [=zȊCAΥW]^O@]JX |IS|/[ïCX6Nr𻗭zkai8:lq_2%=ův0]tsfoɯ}nKٜ\zր,\MiR=K.q߂a]ԇs'ADJι+YR <PZtHYb; sFm 2΅HII#QHS)O]cvuF2劆%ty\y&> %Zԡ5*5l]:}xWYZo|̼ ;:YE`w7ׯg1k#׶:k^{}m9# /1@/w-Gl8bXY~>TVV0X d 1$6ERD;*9uK,Hýz{F:)uq{ w]~j Up[d- տ-s-WGndMHTp ~1 j,BnvMkN-\q7R3G.#.U<1N=[Fe:y^ w9`Nr_4>FW"lr+ƥj5Rv7XEM C*:T]i-sԿ-FϜ}dS07EHo?,/3ו`zU{ZD>(o d@^>4ig~?1EHBNfg]!*ی7>"6i-d:Do(VnlE#V;])}ZVR+}Jkck5xI*Qҹ5(Yڹ 9vm;ҩd[ɄQkѵlSYм_ {M &ko)ɽ%֬}GZSti֕Բ%8:A/RI mʘқNq'4/V[MNq,IW3T,t/FU+Dn)IbPxXgd )^ Ӝ6DKOhbv]H9.rS^c;t4sO.68 6N LG=X| ,]% bduQn_klƌE uR4ƅuչ5Gg:?&B.ۖ* *᪡:`Q4dCNHMTܑ4r%6Flbj3(4[5=[eSbS2grCLh;{ BHxOt;bI{ل5eVo= 1,j=PQcA-CT6PBV6kB}wlZcdasc/ Fh؈q![P*n.MYLDSH*f-ؤ7T+"!`-%GæN 85BKaKrXx"rHA){$@Ln쫻Z7A_U@^0_L B\*RwU N/~qMɧ#Р;r7c֝ίnhuNZ~=ͣѯ91\4]]r 0Ju]\.J]: 60[}{>ppgଛӁ']\-"W}oo۟Om;̇uks!.JAc iECtnE^ o4̃%11;U}v3>ep@?L_y`P6.7"9 p=.BAȻ 3#6}w"{ yuo"ծ'4 \6ad4$EwGL*2CW[xzpq.F^p|z@s6޺)?S1csB,gORoOO ^n~ٽx0>mkGCmi 4{gM=S]\ZC95Mp%JkvZ UX$ߌQl:5_%+Q|.~?U,/N*]{Qm2ܻ]wYɁR)i]Ň)brt+`&3TYݐ^-0y|Iog=?^;^Nꧥ>ۧ@oߤ~/5 SXp-͔3r wgdwM9YE6Me9 yل<#gA0'WEwSew/RYӣv8۫q/, S&3a`$!~S{yoU)~ۀxk$-OiH{.oRzmٖM>쫯6ʺn4&s1;,f < czfڐ~%zUK:eTx>Qf2c p 
mFIGz?*%5C=8z瘛ݣJiD=J(@E/wmu65n!g5GdQ7>H~H-Fbc d!DVvKz@<(#'KSU|0@*yZrZF\\[T!4'kf2Bf`5ҧTy$/!]=Tu`"y|ou9zS?3ہ:_w'ߨ'zuq^^m;Y=g˾hݙC8\RoV}mNNŃ&3l/'?ܬd?l`b୭W{va3f 'ar*SvFԛzQO2dUD#.^wE3tOVӍ:rc-?͛tZWw}޴׫=t5¦Ip-o>gṛG`#k9?rhV}ʩTٽ_>5nlΰs&ad/i^w{e'>*p͙7yؤ3 xm58ΛlVwG:lߎ.g>_Kwpubsg6^.\Hmzؽߟyex7J[o$WPtI}qJ-w%wЙ2g]e݂Nٖd>&=_/'dѳ'}ιm)q'/^r pGBI?6:BlR6lbs1۹\v.f;lbs1۹\v.f;lbs1۹\v.f;lbs1۹\v.f]koǒ+?mV}\,`/6_v7i1I8[=|8$E5E*lΨ3SU]SE1[E1[E1[E1[E1[E1[E1[E1+f*l:|lbllaSJ}bB _?t?3a 6Nc9m[TKYBx³,g!< YBx³,g!< YBx³,g!< YBx³,g!< YBx³,g!< YBx³w Ϫ>4m%uPꭩ]ள~R?WLؐ/ViCJ]'ϣW.I_ga@xf9 Oyr_&mxvxse%'}?zw'[Zh;s.qahI9`Rujb ohC]CjHNJlcќ(5kPtv#C))^n%?އ&'<%*;ڙ u (֝ 3xv^S(L17u:jlw"qv'4EicIB !Jd&e5F #(e&PTg}xl:aWԏx<4LDfoDdm#l<{,^ P9UD()+iI +T[SŻigRSTT,Ycm0"6*ã'd\Jq\1.nSQg&4)ʹPESA|ި2?P rfd obi!fֱor:]υֱS'xʐ=s!df.%s_N'0 ڳT NȘqF"(S"\ؔW۳ОmO={LpB{ݱg5V:_ARJ׮$:jC5-EoduKV~TѶomY﷚zO{f/.K̥Yeu%Nx]8&}q-ߺ |ͺ ?bl.W^Ib~nWNnnnnnnnnnnnnnnnnnnnnnnncx0XAt<: *E\ ruT)5IJ7BfOB@)4upPC8m 7=9-.N->3M-/P3+ 7VTpRVk8Mf%T&WuYd A%[%xCly(1t_ ֗Aԉ"5e;_ړ~n 1|kj3 BThIEq1.,BdόIc{͓cNč)6d պHCV 2V<@V3g$3g6V(cbIJ1lŞEŸ)k^L;og-:Yg_Ygϛ /D8DJ 2A-= :"lbvlQ'8"Spy_=Tmt.,Pl"9#eҬ5W,PAV!J:+bwEm SvV9%WX'7;mf&y&S +%U  机 4U^gaAlR-ڊ28 M$ߞ ?3ؓy'?lW3*5H8JJT(e}L6IC.5$KM2v;"0qɈ73Ss܃2 I3%Jo@VZ0"T(P%R"1=9%)fw4Қ6A'ua_N!)d.~s/of=;oH@#J ,",",",",",",",",",",",",",",",",",",",",",",ξ٭rjD\?k~;&ӾX%` XraҞK.LWٳaɅӷK. d},/h+W;ٿ]"V :s֟[Pg$3Y])fR'l)uPBz FZyZw$N]=ڙ[>R/|brupwuq?|tf.װ~·|S˻ :%xv&fYKQZX ($w,L|$sV:ʴY#JA+n&:s*_<./EޤnQ 9B4zwp^~rxqp1^k Te3J-r#פߩÛgPM? zIC?Xӂid/ ^OMvdu軯Q?~Lޗ8<$yK,OANg$T {E$29"%tsB`'x$% |Jqa?}9^Nj5[ZzXs4+;7U~$5W/'9di3JmecΞo"_nwrƿ4lBjL,.ZT\EiqCb:M-x}$9ss>q :pB ?Z25[Kl{#o+/%DKBhYҮ=* )f@]Y`p$8fb֗rK aCfٞWг24!E2p*&in߂al:ۓ i%?lOUJFkCb]Ӿ??['2~jyMŁkLC! gP"'9lPjZqEC($ !wNߋ`^u׽PȪrd OM,Ȭ!:e},nTA2-+ݥiBܪal:;ֽ^ލݎ}l([Hon*dzsaRNvu7fMg얡gCn1{ծ\zInds'Mn}M6]G0 /Mgonpe9받A|7Lg>lǓ>dwyPKG7oI=^.6un8_2&p` g0Oݾ7yg|9oWbA OZpg۱6g)ɣsl’=2fq^6e·Sc/c\>܌>O7AI5K8MC,}Q}T8Kǯn=)If5u0" 7Νj;AH`{^aiCJ%Vv}O3O0]:AӇ].-x Gy8’)+,!jE,8CU<6 'U99kE0C%mhh[D"ѶE Up̱F={O[|ޘ!7y#7y#7y#w*ގ 7v"7y#7y#7y#7y#7yt PI-qI-I-I-I-I-;o,ג#%ARK$@RK$DRK$DRK$DRK$DRK,,eKi!rDZHDZHDZHDZHDZHDZȣ }FuF -dB YgܩB, H i!VvMo,ZTK& M׆pQF}!ݨ8mfco5=VόgS=[{`tqNJV")ƳXx::%<%wCEgPL5 :D,/|h"Xe :e;$XI&x'tv \D bůd ,)p'žw~x݂_2='Gy,؏{XtCa,7aD6iCb:EE'SQ":dv`)/_uěIe!|l_ MSen%:YXwLzY`n c<"{p`ގ8ɽ;y-[c;. e4)GiIh0E3O*jc Js. zxhG1ɷaDkiGVf A .LD˘W"feQ&YQ#u>MϐmH1 uZ ,IS# %{k1yLY: ٜ^fyoӒyE\ GR(DZukW+krm?4ハҸW̹xZfsLǾU"mZB/_Ϗ7/5",}ދ7jEC#C~i։CZ>9ZT0izh^vEλ›M`v1[s y4<8\̭ts'Om7~+ؖ©}H h0b0Ī[d$ƓV? Y,d<:[M|p[Gl6wpQge1d >?Nt}2KI`،_wjN5 Njۃ8<>$vomo>~#㛿kZqD30OdF&{,oOWCכo154ZZuzQ[/'&㚚eܯGoh֘ ?|y=XNx,m>_5"Uz&1?Z g좥T)y/aBP6 fk=<@8oK/lI|b}tMFHeTd/ .Nh DF$Ŧ6Fo#Uw6T^J]t8;;+\n(o+% XG>MtN 9w4\ãkDӨ\u"N# ^;fN' Q;?:t~N<[x 6 6>gcv6N˻NF &I 1%*KI=YvgiJi^Q{qA9)YˀG a kEoe^OFx0̝}bJ8p1kfw*D8 3Ȑa xLKH޾ tGR"<`v$(h-U=8C}LIAKKBkQr`1^vUY`:&F "XaJXYȬDώ?A?cRygx)cPJBdZCQ9@xT'`J%1%rOZ8'D+D/ciZlO kPWjgP|"w XkJsL_@-5ٸso4R!i4!Y@YHFr kPA+%"zYthb6t Z_yk᪲fr CTneQUTs%#86˜lCR02hpLZEѐ Vܕ^A#aV0ͤ>hME "EF^8W3ϲFr#'nU)eJ8a1$:F*fMXC;YYΪsC9|}#;nw슓U%iEЄ, 'I!LD4-qL'{/5E{~bdcIGd+y@k\f(|IL%]ȉPԊQz;i[p%P M^D xy_saV*&#10- օL]襭Gh+~0}YWtMn [|9P  3\tؠhH-8 )$rދG=XuY.HG5tҡd6]$^u4mu \ $J(* 6y`&yc8caO.!/VJȨǣI1&$%6΀j% ?,qӤ۫%_`sz >燃q..e}IZu*GQwJyN[m͝ KsY ];Lqr2V4(+5 4|ƃ9++gRZH7\|&s|ۧ_|?'[0Vק?G_Yys8<^ X_n\LQݸs f صP:XF/;ݹ g^H.ҼNA+2kQyM93FqdS.^=Qֵ(k7芿Fo tm.=z*iᘐU E<*e/NNaHeQthdQʙgki':{SaBwTީ0ݡSay<Ȍ ,:eAx57 sV!@˞3 g" m لuKgI. 
Y.Dq.+Z)EF"(_m̵ST#}Y ݥ$p8Qv5֞>;\=ӣv[/ࣇO10~y,̏e2?.֡3P3Kq2i!viж<ՋՋ_;?{i*tt4ݏ` N/Oر#?p/jKt?*Fj4'h)LeeAH~+gy5-zr^y B e~H[GeӋo{ۅu1iQD<䵺aťWdίx5Cd:-K`旟 <'E-<吡8_ gtՌ=VvIɨe+3Ɇ%Kqf~bmLmceֻ/z'lA}_@v:s3g9aĜAfK`zIZΣSك|Ұ.J]rV19gďUQ̷.t?ZǔDM(àJL1@钢svFxPx{Y_z|*{,eIRG&Z vA[s"P2GZB9x0tIizf)Rt|!xxսZf y EtY=~ddE y4m,8l"ƲsHɐ=!{SGLCVkMx>HC׳D;` IjpdR@t5Ob ^{bK}qIfidؐkS3z2*0iX/դ;#^[dp(`:nj[vp=԰-j("FW'M4x"8C l3QpJc2LxTN2ZBrP z5^.UC&!8f]{qH/򙌆lol< fÃ'*HmT$o~8OGi:O_!f%c?8LJh W%͐i@mfN )Ry<"90x?8 xI남`A&]MF)+p{2`j4c;@B1;5'p-oMȒB*bDr 2ҙ;L;i'rAJ[|&'o~089:b&r1Se2´[nx{PgNq]-Se^vY,32rۂwޖp$K~(2-f`lpsK Ed<>MwĽ06cEK;l"⵽S>/y7]1txpWˆLRKZU-lu={㤯̈́wcYf 6bWَVvr[N]Æ[an-QGv?pTe XP Tc!pe6!JXNpΖ@[t\ UnHT3 O,l+e& ‚e"Ɠ*"Oۘit-ފ^W80惦SuHo>9*.2лs\2JZY ((\cG'#x\Wք^5ZyaR}EI4$hƬc+ٟ~eriҒȥh'}gKMZVECPbӭEJ - r[ȤSҐ-eAK 2`!8T₠':ȉ@O; xsr;Uv|a[퍏b<,So@h3$Ġ DbD%j5an(wR82t$ [I/a"T !1BVq1ʂZ#kwwvzZSٻ6ndW \N''g_֥ D*$X~3$E]x15h{XeY LwFrJu':JNǗM}׷~xrd,F77n5-5z2g?sj-"V'tB3s;cDL!-Э cd7QD Ճ%WВY`3)5cgl֌*8cW]:օׅI^x&o.09dt`252ŴHUSRX+>TJ3a'cw&qxB^")n8;MC{1 6ɨhXP79afɺ8wEfGCX;dkmkނ]uL ^-h `4qEϑRL;cPfƆDR2PthId B@0%hCׅ;#g>l@<ʢ(}шǮwg׈ruI \Ĭ@PJnOAKL[2PJo!#P+mw1cacJr19 ]JQ,WIӱF쌜wH:^UzQz׋kc:)@3!#t'PjG&d@F Ép^|/3UM> p(=լ6'؆Q4[яOhG?6K -o )q4iP)j傩UB^{R-fw|xGQGq{T;ES>4cRYaR0> I2ò)Xl)BQ YLhUPYvk@]ي.r)&QH G5̙`-k+r |n-q|{F(h]%|c[_ˏvau܈'W~w;ֿ5v`bJr3VdLX ވÈY/ݥ{xTvCN`Q#:o-,%3)KNE!|Bn:ܣؤǣ=+qwh*W='qA$ iDWJ&iiG˹!D[9EWȜ)ۉά}]{ҁhbg+ryT+_2xiŶf}'7N{F;Z#fQ2t4>qKwp uk]d$a-7=B˘}.cnHKi!Ye%ji%SI*rR .v{B/;+Ov=WkSg';WеbnV y}β rچNi$z &QF˒+Pɬbiw yst1xs};Upu .pV $T94tFf x<MW>Y1EW򉐥w^s&59^Vhu5H+dʵ; =r#G"d ̦BC0et\6:dk&«8~~Q ulG4Gŷ&G+] $; +q?_WMj)O \2+oëx18!zSJkB4)> -:䎀@q0L÷Hz10L&xO/'SRgxٽ-(⪜|!1o4Qh]\)\ 4ot=oO pZܼY?G* xeu%<Ѽgf5Z*R-K@a)r vHG71_/hxWǪ26ژfOWŵF6.;/__L.<_6 F3Ĝa>}έWsE>'[L))VDr'lU7be7Ī+Yf$Qh,U|4|=oTL^γ<6;ejl3`5Wl)]4kۛT)ůx/ABxS6 & =>h6^2д*[勲1*㋵5Q#.$A:ﴈ8 \R{pd"EyTsg\Ս=/+!jz[:Bt8:*>2N*x`hE&{:Lpzq/i;H/OwFz}}un aVa;O#Wav,; &@j/*2_ig D K<gc'V1;e]E:垆 /) _25'S?e\^!I)21`<%in 쏤$xzIP5F9'#+c`qV'ǔ 8&ԇ:Gv;,ԛ*I8 p*>($at.#bC.őBKa݁1Y012@J11ctLIy F'͔B1%p6@Z8#=1[`}}Jo+ x3w>;\^&\:; x)T/>` ux؈5]ʹBWn៦[b^u;uǎQӴ5;ц:\E]gH^_>*@DI&M1REiᖎXЦmZ<߃VF41"R[eolZ`p$ אָBU?:{r³՟ߐƍ?R|uX5$Ւs$le톨u%i3A[+gs1JʨdϛO2/=gy"Nk?i5P~$8.|[oEQBWPp|vkG+/Ee`@Q/EQN7Ȩ\~Zw܏ZWi7pO*и0I^~&&er^(m*ЅoK<](%N؅t.d϶;t$/toeqѯe<ڹq|YPw0QU[ϷpAwU>b&ʭٛ*5aEX]YZ JH>B?XE~j]l>Z6n ʵJ1]GtFi4ZE+#u8q'(BoDHmvaIKl(n!tOb*#g*A=@dAR);^GnE.v9[T|>zb[je͓[=pyb4637>~?ˏWZ荋cz"Gтuo~.%[910*eRځ]Da,(.m 5f^DŽRjI߱{;{;Q@Qr;ҽhd"j电0t@>3+t.mV\e>2D3>t<?hJ-{gWXDZ*o#V"T"'/(uG!@d3 v2 BԢ]X]@UKƣ_)e}=/:cl+upK]:2{2GZN\Ǥ͈tXIC9'9V+@Q{)7=`2ɽ+܏Gуeϳ„EoSɒ`GEʕ{Z8xVҤ-&9ukՏV}2{㻵.g)~tN %[YwmmHW) @fyםݎ\%iRMJEdQ(* J2NL`M E0RIeLD_g4iG`Dl[70NմR3 3S^E36Z:P yCgfRvIC.fzg.MV"G*@ZnG\wT<$w1yau+’$Qm x#SH23CEGȆ&eF1‡nTg}c&IR NJʤ<_&-TOhgR&V ;+7΄ ]FB2JřBu@1'XRo$F1"6yD|U|:iɾqQ 8stuhD"E)UD%M`P3Yp/eU1fy9{fIJ;rKHf_T$}!VZLSlȘ%011 rIޟvvMR1 6&JhQ7"D&%IJ"cQҪ$L^9G73/V YZ5PS6ÆcT, [lj25gG;_sZO(*_MQ@ F/P"]6JYȂM5@+5%`-M w7H΀zQ3VZ :֎$Gu 4%ˁjZj'Oz[U& 3ē Tͽy*Jg0l 6H@,7G˾5Ӊ=6W[[x?πwԘpJʄ-! q$Ya"Ê$ȔdE.v`mVDXYjF~ѽ9SLEf|;I3%3и ZfX *EPldAr0vqKz}kO뚾[o}=kܤ[41'뀄!䭂M%}D5*F%;UZP}QYDjT5*kNXcV2]  w,LtFԥJ^O r< y4DޙQw),o+]fZr~^̅XMy4xqyɿ8fVSy "vfm  SmF6Nd[:,Bw6O}>oSbXAptp^yp6az?헬Jj} j@*R{95sAxҍ+T62HsiCʾ@2P5C+*{ Y$t,[U3q_Zh,bRӪ՞i1[ ĬY/>tjbġz)j]u_ d#ī=:r^]sɇ U1D bA fgʔ񖬠<:Fl_{%dCl(IIsI1|Buƒx& d7zrnH?<wj3_įh~q6?h\4z4k|ŤLgl~,u5 x5}]D/af` Rk`G\ƹ~g%寣^MMwF܃.uCK\bhdXD6!ҷ } +w61E?20CE+Iramy53|gW$N?!\,Y-+0_^Ųo?|=lqM ̿ίK]ezr Q{e ̜)EXmfD+7nwf3p=]+fej)N.ju-jz߆t?s3nv3$=_/nQұănꁋ[Xw.Ql8UêǾ[Oghza+D@RI{DPL ()I* Mmm956E k# lvEqAYUcYK ?]v[Iq5u*L7yPEdi {U}ءQ;=u:xF2B,/2W)өΏy\ZK ӝZv >N7N&FE WR{u0 &%y.EiE%IDBI Jp4*$?Dk! 
)j*[dWnsYf pA{Fǒ>` q^9[-StbH;KY6D%%/,t&]QBDLzWU/EЦKPg(&OFk+t\”٦Ka7@Եx5PPMo5 ~Rԥ ?ߓEzw/VöY`~zoDq=t=˷Ѩ4OS6%RjMC_{Ew;L̞tCeÃѽ FE-4z(SK>X3QѺ͔P[tiB5ҔPRq*(R `/!'B:M-q$9ru>hm[<%W1B;0o pQ!}_IV090i.(x$g~h6Z6i7zwߑp#i?HI&#ᄑ(rGT\aHWc).RUE`"?`qΒ:"b:<;\U)-p•E])\YqDX\űK{pzb)W? \9]U;b+MV)իBػW?׮ЪѥͫZŅ;{9Mm~$oT;6ϩ<Yp,_ ,Tg۸&W;?\qJj\Ѐ3EDʉr5f)q(Q<~$ΰуnt7_?u$eM]]LQ$yW%߼zhOGpm7!S`~Bfj~WXއSv^zy_gnsW߹Ï<15SQ ^ff|\a\?x- 3FɽMfeOګ텛f}օOL5j҈f̓/і俹MV-KG_!zK%=)~\ŽScWuǭy\}T~9?_%|.Us,TnY5ЊcCGoBUIX UqMq\ f1q 7A=Y9N?/;L:j@Vf&t3 uI; F+y1X,XpY(4LjM]hpqqs.ȤՔ)$DٳweM{$-~qpIbG\ۇج /Mzx-%.:+Ԇi7N1n@i cE I P;tb|M*L.e{%`)6qًLkSY\w6T8%ݨѡ"De @ٶ'2ϭ{%빍T*& Hh0(76HU> 9O TK>d0fQI3ni<J!JmQxH R"V(X%Є =(y>Eg (ddQ{Zʢ}Ics*s4?]B|zC\ds#paVBWaKVZ]JnY<ѯ?)G[`'ThS_'ОT #S8`Z8|0J('hń#73?:/ʼnk˟ .zԁຶ]Λz͚]yщYtwipi9ǟF~m,jVG Ȍ^wK#?:G{).gj50gc}cI쬚3x&fn۵h:|E[M$Ntz9G:u9̺G8 g0b^grcƋnjݣ.kԎg5 F=u;K>rǸGYtd ϗwy=ݵ#d8;\\Oa˯В&WBk gP> ʎt>OB'j6㋝Ѧ7qqc$h"0&*' LZnp~S7HOlX֞?p9GķAFD@{!-к3p$ʨY#Mt L߱?]}PqsǍ-ngcG v>Pbqe;i%xyBtHHe9Jxb+"JFzBBַB&Ss/_D!O4҄yPڥDSH$\qygbcG*d֐8QN_ ɂh8ցrf0aiJh 4tzb D_N_1vvUP2'1|ʢ/"&9+"j< =%H>Hn GSBuU%|d(Gw$J@' P>cK x7SUF"@QCIy $#Z d%Kvy`xc6@>a%(A=I"DD)CB@!iZ cH.b6>=~Z'x[,dА|t cLǽA%ni 'T ȵQTi*!|4JHadԜH!prSC'ʥ(e-Pj w=5 5]5tѬ@9np@c:Z0-xkd (S1 cgirT@uq/֫{wr"丹Y9޹83yG\ҵ:3g\O4VZXȉ0*Җ.N'(C*<|hj`ls { 8z׃x);*)(U$B`kQ,2KkL&g eT㣡=3;%Ҟul 8:^F;ksNN-KJDa],Z3mx=co;($S-W@dS)l'h͙DELIPG0T cn%^y]m‡*s֎52O;x;ۉ*E\`t1ѪEWn,uغ_2ĮډөTv]bډfn{;e߰vON*}TFi ='Tj,Tw#U7=/%j?>oO]Q=|1Yq>VZbJZ)IhZiqa,4XTi6X/J}چև1i9by%'E$~VO46 feRD"LfQ) `wy)$0e(!r+ƃa 3A9C5("gIS!XnHȄF:0FX9lV٬z}Gӱ+׌mVc7v#Z'6<\mE@Zgek6D(d !1%>9LD O3X|t,C7URJG8%ja$Iͤi|ʉ,F"lHS>`d}|0A 3[d9 ;%ߏ/SƚdjIzpILFƆ zyJ>9|{(P~YYD&-m"\Kh@&$]qe%pgXTj;"F/K`H'U@-ƁUKZjV/콭q+Cgg̣x ^>n~T nRM+yY)>$|eJ@dQVVARz^|=/]+YգVd)T&ELGAb{i竌-,MB0un^iy eP]J&f =罥BD (2w+Ҟo1vz%ͪbq=4gsMmt~5-ond2}2/[b3>)bJ!>a줔F%P)2^nVLfyDcVbsÓiFRkJ%L(J!&)ls)}Ť@nqvڃ #0md ¸jj ʁ-h2h](~6HRiOlɑl'Wl=,9ԑ(KJE+2xBBO-i\F"duQcV+ᱬєm(S<(`)eTGK!VQ LaX㻈KP/gtYKՋPX/^\?z#4paLkrt/"4UтaH`}cчŸc[}( C>܃ 5 ȍ'q ~N7ٱ\~<~...Lj^vkm~FPԠlG} <@L4'G}5-5` B8(&&KE$IzV#GRĀPWV; 9Ssm.Ǹָ_`H 9}wzeq{}3l YG4aӵv֣eӡ%Sa*DByBvRFyW*ViUj%Z=(o7|#G7^]3O0]* !TȒ QP: N6 'VqI1ED9Q(}II9E !1"J)N|bpGzDp\D ˄̄^ǎz^*Nn㊗Ю8g B~tGySw2C H)p/ k% S*i!I((rkpU{UrI7-$U^>&$_>yu̗ʧ A+hX$$Kegʙ%r8PdH5`MEXfJKYN:Ftkxa9+FNO9o@f0jv `vKt0`' BLPG8)XFPDڀG?HVUj۫cbQN{@"!Sr՜erU+if#W( :bj D3UqQGOlx]͍!fRCIE։.>)c4Q"aB!#RiwJ[nۜhbꋱȱ _ T#h5*yZsQYyPtƱBՈb(C2@S3-_d:ه2T gOQ+3S[I}n ~W;]©ďϻ, ~3D~K.Qu73/?fNvR`[2I8RҸQwrKqu|~/;n׈E?|N.m'G&<fEN7LøΛ7oNI'tYf/-i3I߹ {/h^9T<&o }24E|ލތG\;,R| t8zyyswqkP&WQB7eZ6:֒ā. ^[ϞSd09`TKڲ@U9盔8'P6涚EZ蘬-#-Vi1r ëmv䲢3N7 6L{zځ,5-8ޠnhǙŴ$_h$´ὼ(c-E4H!i&mb"gNŚrN̐NF J=>XD1ȆQS Rpc)TNj2_r7sA=i;tc4" 3?otjƄT}0j0à"Iʠ-D%Zc%kW,EO@GZMm]Vw["E33gqKOBA:E&Nk I`LSV ;WPR"13Rcp4o3 RHF%S's itL7oax->30n.WHȵs.MY{c3Zu@2ǧV;`[wϭIt:ٜxlvb[Snƛ;zyz~ k=]0>;}A l.̣ T(A(DąJip/pe]SKf 9UV8CYp&e6Gb% lN ,nkޯT6q':h4WΏyEg.ޖ=[!%]"|TSk#V䝅d m";Eo$dDtԊBM6ZU_ t8J0.#c-n3|W b;f@! n_%ύbHD0MǍva?h~XoofgAؖe푨Y/)=>A,wh!O£ϧcQӝ#wQuR"OE]ej'6T.V֫QWBpC"ݲ7c;ni=e 8E?6A,XzqiL".Jy`{0e_k~TPN(kƟ>+DlC3~ys^̤ajLeu౧?W9y1]-Wެr"e9YDfx;[>kw[6L Vcdr "!.ѳ풩Z廴]ƓxOVj,h/jkx"$Slp:+UˈyՖ>Xێ݊g~h;v+ZVZ3O8ʴNd$O* N$N~MƏS;A81-J>ql1|,JI/f3+1p !g\2qtƎu)E)Ñҩ?I5$lrX%Syԁѹ\)\Ӻ—&:K4Y4^j1ut46ԡJ~޶?/֌WkwӳG0S{XINOєo`GHtVvsA u?%{/zv5t4r¨Y>%fmyW8. ֳΊ>-eL>?N94AQC~ӅwɿnՖA?&atvB'RG_>~O˿?ӗ?}|_>N? 
SCᨍ#L/"`MYޟ[Sv55ͷZ{jŠ˷n|yu2mQk6fGau$Ŀf~0==ƣ\\Wt&'D6*14CTx*~{4.ڋb#x[(E7hB2(r9dk2t dX' pD:ffSRmtY`Zj2uQ9m,S2!P(Aw9wyu6 o)Y N+[*7 ꇬB"ED-,zt1]mKHJ`{U%h:cquv0ʗ={xzUeYNO o.'MȢCĺls#[T-W}a(M>0ЇzBK ǔ2BǬvsyZ\ ko:Ko'nvgr*D*=JOUڛTXnKu6T܁7Pj W"Mɸ+@0/7xަ3e QY+MR&Jkuܒ6GJ+XCwH;' h,qdzI ZȊB6*Z\M{:#nduS!;Kk0,+&$B@ Bޞ{i;~e/5~e/)Rd/Z7u'b)&H&%$ZK77ADmǾkoA^Q݆di3[~XmTz.$ZAp4&F˸"JŬ[`ebD>+\V^˾¼q>ޔ*orl-~6Ken]mj\En+!XB(өJP@-`.zxM#e>%ZjQGL w\c"4 I(y6V:-7âEQڗ Z"ͦ@eL2Y `fЂ3'AL iIO!h2!$wx}0(Bb$L4LGrD$"kDTNj!K-'FN@i 0IkthlRF)$sVv,g^5 [>T;+J *p+/4%5F|`F&F@8@ ދ@"pZQ;gpd,:I%u*RXRBI 'CVv/"*aC++QnCY(F$5 \IW(*O"^`dyt>Y[JUvH`51CaXɼu!JrL=;EMv VsW?#EcB+> 6((b EPJD9ţ;XU[C8d_逎ޢbvuGV_AaHPe@BlL&>Z{ -(VaQH~Q#WY$Qd :&+a,u267!h~2ԧnsh[:=ŋ7t}[]C=\Mg(ts7=I0Z4('5M4v&9=WNoc!`p״ȵ<۶~7|LMZ0V}S#;g\![pVge>'Kt<۸۸+ b5صب;r׎ki[R=Bc?"[O(]gsVK^ޚmL*]֥r)}ʐc5#L+tFa$kJ> n;ʑԕE2wkL"q[/ ࣷ"^{?1ବX263SN'gX6(T 6}+qo"tU9<[ -(]$Aln79I)9APuiljzև!k^x?͗mb_xB\+j,Da4: XUJTXx.BٻFxPx((N8,aIZ vw^r"k @ s.[v+O|iMKUrFH>P`z\=^\8(&1@+ , S쬄#}Z ]d(RdY@au+yoB 0KKXrAJYY_=?O.Q^i\,y2b}ἌEۉDڎn.#.d{cT6bQgQ-#L:ˮPQG o^=`\n^0yKe2!wȖBwnͮmFo|L춓N.źRRr]h}YDv轶E-am!Ǝۖmlu͹PFk"KTFhBJPfŠD\eMˁ[簊'C?ޚF;;=o{M2t{Nulj)<-mTэ"eT^N?yd:BQK\{dvǘןy7]2貁{̓LRJieޖjWw F݄(قM̬Ffz{N~5Om! tz:? HYVp ֔dH a|JB>;{ v (5F<3f&D&'!D7!J*Q?{֍ʁٻ@e MEbE ص,Ľs}$Ym9Q8Eyfgf^P^Qz}z_T-be۪% <&qFA\j\ .nJ9T/B3….򺇏jڜ טmQAyJ?$\&5j I:B2s^Kw(ߍk!kBFJ ^`;a MLr%/%_N(Eb:&ruPHB`T<Z's€9^w:e^һO_ej5g!Ƞ&4,Pt@)JEbei$/{)Q?D HG4Ϳh>C!Q 5PDpJ j1r6(ٜM0N,'J폇7Mq[}Š<9nN: ]õlMO=v?גV i NX%Z>jTɥde:F)e!* G 耴1T=kd 6X[OJkb׌J1]X3Յ.T{]4uFnasHfǽQ j >G\cˀPL0"X<X+vR-SI3Y0Y٬ޚ'eƞ pϡցK%8 A1NP{00L>&.ջ95Â+Zwl9)^k*h)M ˆ@WSEM^ 8:XFcβFcV6@RY"ZȈ )yTkB$^ȴQG43G:*k bׇQ?ȊqW4b1V#Ҳq^#5bWIŹ`r~jI2 C. * PZ eHhG$AKYϸ༆(G=Z )g` kb׈"jzq4%_g1.V/BazqW/# A?/lp@(\؋ DFC`'ŧЋqǶPևb;}*lͥWGcW؆Q4_(sTG?6[ nGڲ}75@me >hkNbA(Qx/s5;X{^A14)H8^*"IR[|&q>*&4h@u>H]ȷ(=1r` 8@J Fb<Х$|?Z˫)f[28\|Fc9q_-|s#okҖ@j5z]0Sǫ/WA6VQ:})Ўãq4Zz.qXC/ @א/.z#k5W;XMtf]#amz{̞>0 FqoZ C(BUmRT`8C5i01hnˆ Aznl $ TR$2A3⩣hjs 2HBv@sJ+'>G C#bs&>/} v04C$64Z8A} !":RH5xbjIgA̩w= oQ|UzpNڦ`{9PEQ|蔞} (f% PRj"И/A()Q P)1Z:CY,'љ{,V߮{.VAB,1GBlMj۸ȍvptܿ>;b3%M万N盟~(EkZ'`rKŢg}S0_6 3?9yf,kYe ,[xCzJ6mS䤙i`EIκhm$kjH߈0m 1]Yx2,Dz謯O7Oկˡd)ka1q]12L,h5T"=&bR̗a<lKC)5GwyP6| -U)Hä!57:Eb8%~My9#68>Y_ M2a7ffL´=<11?F?6ŔjAѼm zkG.E.oͣN P.W]y≹a GZ0墡L5\9k6%rfݛoq WAFԃh.7'˸ma󿷋<[0 5a:&oiv̪xZMӪu*wU8@u襁U ^mPhvBnnl?^V7 xE քueL-]WWJQW|W}Sq O+`6d_ٛy,w3ԟ$TC"c(4H=8#&)Ov 'Uғ3Zk{sJ;T/ \ P$Yn>)lU58oUr읝ER>hr'C;:~01vk=HBy-4n{g/w*n0;d dGVL阛%O?~web#u&R-g0E#X16sg ]]}l,M $XBK4)ͪSS7jC_ aJnχQ˸=B%#dNiܞ| J y/|V ~71jy^kGxh$02_fFsEmdQr"q+P< .qgI%Wz!1D g*[?bN_/f>7GW"Nl=gR -GPYf}Ne|+VdPG Q|OR2Ҳ"9P*FVG#$K3J1&*yRC1rK83`0NgVbbMx6Eۮ;^F P#$2F/`9J2)G:0ZCT6V:p}Ar-GwrDSP9K-Q> Q"b2P4ډ=ALS@8 &Tw\0J&ijb sD Dy &rLƝr# g^4yK84}f ~? mV0_x-w?qәu1LI盕o  6yIBrJb.V/:~P($mOYŔ ɋL)L,IiKI!mY %"Z(@=Tmb?}GwHX᥅u8n"A^IckUR]$`d ҄Ȥ-=[{niAZfp*$8zʽ PD9aG|< m_3#4F$Q< :⃵DdK2Bu L_]39B3>/ퟙ%eױGG[3nw slQwՙ|qrC$@]B~Ђuh}>T!2QB9aW_?QtazlFa\qzBױ'sg\s=O3![ceq>6ީxS]F4-fI:>W|TMh}<!ߙ#PtfS߯q8#O0IjiNU<𿍋'b1dF̥tOgr3_H7 ,Ofۋ7@HH(ם#ˮaX0Ȫ,2;F? U̻߻.h>x䘖2z$F혫&TmQGy2> 0ףX R=ՠN\ѩD>P~{ow_뻟ޞ~S_NN߽w? 
0H$( 3N~xЦ8thaВ@YO|qU)w{W|Fjs?rO_7#oxߜ .r-#wͶkG 'Wa$w>ަ y_Fhk>H7>h^ߕ$Q>fb}&d#H!DϘJks2; M N4p@ܨ4TkslmΆʫcd3wmy* I`$'{,RW+ktwgtWkF#JjIb=fǯȺBPfKNzlCJ lX 7F4y9r*_lvP-/JhlzEh}P!aP8 ,jYzm/-;"hj5 J٩VkEmd)*JG]m* bq\c¤1yW8w0*zP,apY1+C-p3w|ŦU8_Uyp⸰7bGk_p_Ni<0C`ǥ%zA&!"j62–} |/z D/t,CB1*\CBL^0X,U W0h\j=݋:~xoI¦iZ[n/V~uX9u (?n;punxK@!b/9H_!O~Xβj[G[ ;{T=R˃Ӄ |-/y9|js}?XMilqPY;/nN w/7beI4k"vP7hzXFo?.(:O (adzSU6  --V/f&m.=*\Uu ?XK] 3nYaf~YanFYaneJ{*4FZ!UJ!oM5)'JLd};YDmŘ-ªGUЮBu:i !d(yx畭+tg p T-r%'ve`VOZY㧢ky|,(&Yr1JwS;{Ҥ)g^;I j kX*;.)ݟCs9T W冐/#d:NɂZ|O9O>S.O"^';PUbJH.|kE[umR1/%3A:g.%7%EiN~K89N,]>~(6w]oo3DӢ&QTNJBlȵ$Mf)2^.ϯ xA!cZ۫$lA=&Ś5[F/UO9!@w&[6VUp6b0aVG,-2#EŒzAͪf*M5 3O9;8|LBς< yN8D%'QAd XZteA^]DNމz] p>Y*>RZwFEs).Eh9j3XBRbݬzc}1 cM-&lvWу842M'ޞq.ėzx3MZ_q1qA`r:+x _9u%:5E!tlh:U0s`uˌ=Aś|jAT['zi;ٗ/chV<5nk]Orh#d `^>5º8m퍟pШH/"s{Zׁز({{(Ş ] q|J6P7iTgiwW׻KRV'}?}Ljq?#3f2<˿U6J؄=O wxcܬ~.ʯDM=;o~#/f%E87vhhutte/T/6hEʧǭNeljkz_oL݌w3b[Vk5 qh<ұC1(:e Wol{I? `o/.d $1o,@W!w6nНm 3΅v̪G[7=y^Dq{Utb<ɚ&#R'C_/]Qy SF90@UAt .TkʤB8b;*&ceR)_ Rɹq$hfl0[: (-Ar{= Pr5+Zg;bSbiǞmUgԦƣx`>(46k";B"]FVGǦokEcNB@ 2^ '2e5*ޙzESKƸ, KF|xyhP8lN\W1bis8pFc;K0c=^o07YFLg^RQQV6BDSq(+ZzQV^U늲(^/Z:]pin2'<sRf O#YI&liv/̻\h//t=B9JFJbsYt <'T.o2K X06QJ8.Rf֖RJ3ӌZ^^ɲLǗ/`8_v'PH~%ͯؤ>'N?`FX9hS;&4LX_nh/R#3!\H/>df𤞊29iIZ2Y}a/kCmR*.gT= I]JhiϹ 'RB8K=Ü.K*E #,Jf4/XJ,p.c>n|o6g>q(?N(qPkB0~5Jwny߼}mAu[oqv{Y/;P5 *&㣎\ol[&he`5kC v͟owޑL=&-ֲoU 0 +Ēd9|cJԀ%_0lǒG3H$TtAȴE@Ѫ֗2h;H.5WBFWRWʄWZwqE-+S~3.ggCwNnOnTdO(d8$/GEn\+x7j+V> Y 쬼8~u1\HPXe VU)(ǩ|L?U53y,FW:t+/x%I9Y*UuEbQW9BPPIoIk@ :mʅ⦕m9z?H^ԬʂM빮0"O%/K&Ppf 0724%:Fp4\zv3BxD^ևnM XHkePR*;kք&Rq\<B\b+"tJCU'W-+C1YWlFXKXr{VFq"#+V ` oЯ>r6DT9Kj$9;䛷{GdPOTmK;'~x8M.%^wU8Nī{=ah2ގ*{6KadYr!_wr_Ӥz @[ Jg6E-*ءioџ{~ùp׊za-byLթj݄H&Ͽ/Z&oCOpDLWsͳ?{{"MsT;L[Li,>}ʨL5VG4jH/E7ՀRҙj-4$QVF#WI,r6x<%;j\INS%^ $B>R 0 E.bX.V3-]BcZ$g]Z!"w-v"ͦ{M~$/N42UR,ۚZ-aÜ 'd]W:yNj7ewnfEjkjHi͕GiB@T}ÅR1AH'1l, ^@\MҚcׄtVm4 6GLZ:q.W@Vʕ>i,оre x qdZHr{"FJ"oX iEr]W$Wrˮ}"r FlFˏxӌirr%ȕj׮RpI"+4B\&c+R*U Iѷkk+Ć.WH:j\qEHx+,BZ\!1\PЈ -GWX i ]vϮ(WR2mũ䖡UcT]v,ũ Se,fUE\icYUX (%ݪUUf,  *T3mƤԐ R4L 2R2t eZk0Ycz&7akT,r)9䪅rea<& 9hބ!bRU JTLaL qE4V )mg]}=rz'PX>zw3\v h =\5r:ڵQ g;,TRtՓ3V\!j\!Pj:j\q/×+XWrBT,rL.WH)X'W-+aK+e"BZB+4ip^F-4J,zFhJ-Sa`.5()ugv2]U4Xr,>wAM"c] ehA؝$ tWOJ&'LSo&LU&ַ.Ӛx >< )y2m"&B`K+XHUǢ\G,4* '< qU4 (y-JoO(?3fU3Zufe"r;ڵ)̲ Ukc+3]#5GrQ61 trkS ULrOQPr"WHyrBtrBbB 4*bc+e&tBJA;j\qI$Q\"$B+ʕ,UO` r#c8I/gIp3^m\Tjѻr#vb%߼}8׷ئ>;v?B6xm.'=DeLg7YQs/ƗbY%$wϒ{ ϦاIn=p PVé.?,?FQ8Fˁ vhum~Z[u^R: Vafmkȕj׮!"+G#Wˢ+IwLOj@(ʋBh_XgBk03%s%ɨΨ9+.gaj=2=5#W'ef0MV>vzs꧗Q7,~9<έsG7;]ďVF"6N@jSq9OWjyYRXowyn,.ϗ7%zQSOy uc~9]_p\CW05K`f^>\<+ya3Mqs)̉V ndCHs8w^ʂ?Y?<<:18*}ݗl[h!k[Hz X2erDž y/z. =i7 }c8Y$?38lv1L @ 耎/g)fil0bzL $?JN;GвEֵ%W`B fI7ʔ).A%TBߜֈŌH''w>_$o}x8%1^ye*&#<-iITMp~ħ+`t9~EP;$:F;cޱu^ܱX9͊\ ͽ&.}o\0 /8LLg /XX|Oj»*)93SSkJ~<Ηpd.O tv#a>]- 7jl }\<^wn8ARHیI"Ab:Xww TQGfNdr9֦*t m>CƝs$s\o5oo.g`ϺDQő>"Mɐ5Jд 1 *= \9!RRbٰx6pQ¿h)ŻaMʩol*W'⛓,6a˒[8g68Jh 'xբ$gmW'O'ҜMUعoA?Ue>'V;dY͌ԯW/Zzт7T?VF+m9RoXy14-|oO7/NOPCx-CòPwznj~j>z2;+)7 k>ïEi'o__^\j ͚wOdN>brWeE/?9}{z>Ə:O_,8z,܎~ˏHzY@DıbvŘPňdh,u'B܈j8G<ذvy]o=c ^v^k:̑%#ehQ|$ @1+hMBH")z$L5At|oUxH?70N܋3 MkA59?r])+;WZP޹n U.e贘{m;wanَȣK(JuR\VKi! 
p^h#F3snDܴg g[ Zu-5b9\w㵐@ 7L6\**2ðGӪ`A ц]ԅČ.8BzFJb(q5<ۑn@U\W %\qY{-PTPXSt,R-eeJSNiMoBRc)E%sJ f哆TUZQr„H[V_zǡ* +a.-&MFZX0ZRhPM5'R~8ڗv5m 0#k#|.EOtqNؓWαhIӘ4ՐB/\llD6v%i oPQ& p0h P8дVJgc:VKC t޿DmHHSˌCj3 jl--IlA 21TxiԚ@o a0hTuNɢS˄QEV:j3g䧒cD,*T> ʰ!XU0f:C!Rz)> :^8 l7Lk2ϼ}_ؙZIHd(ƐU9*J @ 07k߲cy 2iFHM7(z82q^YힹG%aJ}eE jMB9b@ZDX@jGU#yѶ[fGt%agM~.8tuBlh9ZźS.{LjXt9(ׯ]>0gBkZ>'+K^4RaCq$ &P6zTD$;il4XW'd.I0[D!{!(,CJ%aCbR/WCC;"ˋGֳZ.j٠K>-nq $P̛%y5&r4B%@]0M,lPmV'DFٜp`qBQ0oV0.P WUΗsڢcڃi#U{8y ]pPyC>zR[tБ@"YC5flQ!"9aȂ ],5Īa8rKR&p0qÞԯWJcGL?#NqoarH[tTl%&`e-!&t@@Eha.V nY+m)8Sɔ :,c)¤Hv#&aw,:_\ !)9Fᡑ4YΤ}E#G:)L2QL`z(qHc透Úh[g -.0Ftgf}bLxj xU@ =`T)Tr]t֖v`5/ֳ&ܷ#O?*Ǝ) dF SN:x65AdD[]1ⳇS1FG9{2"Jr(s hf@cfQhL0!{]ӓ%Ň)* aUY]UF}YeG}Bbn ig*wllg%\Z;[|.%0uaBٱPB.>*e O;|{<2E@ŧb r[cB,j0`*9J&FӠvTM6OiQda͠Lu&СS=:d*E; &Αv6b~{U.LVb6 ApIeȶZlP6уhAJLA)c4Q" @"P)hq ;7̶vDPY,3{#ќLEmm1=h9!X$AF v,(@0=+cw:Ӑ]gG'Rt֪/yav޸NNDɀ!e:hbO4t#6sI(ES\gl{DA|?-^ђ[LruVa<%»e;5ji\ޛ?q!jXt>U5_kfsȦJc !iud#.ή8M-W4:{uS>m fM\ `ƒ̐`QfyRsHrd:GtYc:VW'O/~h<9,Na $•q,odDž4J}·K. n5@A:>G3ؑ~S.jʵ+ث+3K;gO FWl°eL[:XItңokCt%./ڋmiuہ K`MD@*HSq Q`bм4ϗ6& S㎉[tgÛԽ 1$י q/rB$ ]L\q\x8zKem 0Y : E2RJ)&eb̩kIQXg+I} *;0:z>XPRj"=3As:% "!T7F+@T Q8)7פU&/K~qXћn[EbLG24}c[?Nq*iB4EkqؽNR%WRoDO!߁h,̽ HuYُQLSYuD>ؘհ62WE^W)/v/|y/{.)NQYO\Nb{~jqXi^RJ,.MȐ$ pg۽weɋp ɊCDDwBw mne Xw[^,xDcyq5]v(bݨrY-ړ5xn!l=`  p )~7/tQE/!zg$1/Ђmz<4 6go`:w0aZN`´2>UXP\JzWYZ~pjgWT* ldo Ք,W=+ =#$0PB(~=I˨zzlWuvz7;+zWY\WYZ W($zgWR+ ,n+P}ptgW cXS JFPẔUWB !\eq \Юps+`)#B9U? J+(9vR|pLksl%RE]>2Ь޾l'`R])ފ;W&Y)8 Q pt5 QSN֢;v G{![:,trS0= g` rU$Mc<+Cg)3^BMێmf3uXK7^Ʃ1Fa!$pIvG|Eݚ^*.G60nYUwnU##Zʻ;fsޡ[Zq*nZf^~hR[瑻i f'_qcd&n;O32s:E*5h`JgE긤^BRoi^H ݐvFs\_b8U*a9R\a%KzB M2ur 8c4IZ0蔰:8eNP9iT)v!sbe rHi?ű)0JnT*! ĘlQBeQ[e6G bf+CX"31-dҪmL451%0TH6#(Oր1*-vpPEӏ/'hRRJ>8ƥJ@(%J CK "69Qm[}" 8qc6K}Ýg) O6;kTA|mh,`:.)&ӠPqUvIqC= 6AeԹCCpo{DPRk#4*6U&)+6KSV 5HBb:AC?ه[7LScKT!XZ% sx9V!(兲QJ=n@T蒐,XocRqD\JG$n4IR"%Cez`u]]sGv+(d]UTb;?xw+k!꯱SeR9wD 98l3ӧ[ѻ)D$6)hK se_'R:a@"Үh#U>1.JGj%6 Cb1m6eAj I:GEէ ( ׄOQdzk,R`榤gE¥3XgB Qt =5 P]j=v4#H2`4ZE+`H9fg'`UEF'{'0O8v=~Ճ9v3`BQ dRvccۣ[5۪nHWLFE# dBC;`8%, x2&d` W2X h,Kq6PLv+u z\F[`B]Q[>ಬT ++5f AXw[Ȼm3X "Y("3mz;clQȌ:BUnYy(`1g#n n!6K17SL3HJ vfM0(vkq)9nQt.J  P Sѝ F4GQF,T RQ{RH6 NB Aꕶr{vBYvʺ@J @/[.EϨ`3/z ($dA*k5f UV+$1^ B5VcКe$MMd,mm% 9=[GŌUDzb֛H1&@T1yQa:I !L&MY;L+'e;.#~ԂVkW f=f/>Hp+I|txK Kq@xc#XYd :AJrIJ%UA 0 CM𻃵ˌ,$tF^8g Z Tx" D&jZ5`A>xmBV5$ U;X1F=i/! OݗUe 2inuG-=*q )X:?*j`J4TU;+Q8mP]֊ANEVEhӍ~>?9kK.N&!The1+xm e,!{إ)྾ R{ЋQuD! %:.h 9>5ԘiqCHUf}JPD;뒄 X:!t kT([|`5DBrFP,F,-;#iﳱXeg8n𠗨-CۚbC_kW$vŧCQ(;E_J{8}f:2ogʃdZ^Bn/A[Z^u\/4ZӓwmŢ,~Pm"ŽnH~7[-6EQw+Z8Th^ _ -WU^nCD?tǵa7_\^hb+Z.G/^HG])˯XЕ'˽ifoGۿVKhzwm9{NơٰTԣV)GJ_0IDnV=)Jvp%4\CzqDlbpņ+6\ Wlbpņ+6\ Wlbpņ+6\ Wlbpņ+6\ Wlbpņ+6\ Wlbpņ+6\9 WrO 9h'7\)`u1\-\[w|(xb'\;$mRe2\{C32\=rMpgrG6\ Wlbpņ+6\ Wlbpņ+6\ Wlbpņ+6\ Wlbpņ+6\ Wlbpņ+6\q W WcBLph}|+bh Wpe Wlbpņ+6\ Wlbpņ+6\ Wlbpņ+6\ Wlbpņ+6\ Wlbpņ+6\ Wlbpņ+6\Q WƃMpet Wb:+F$pņ+6\ Wlbpņ+6\ Wlbpņ+6\ Wlbpņ+6\ Wlbpņ+6\ Wlbpņ+6\ Wlbը *[ ^4;xyJ+zՃm{־vbeoJxm"? (Z6(ճ+kUWdby|B0E:yv99E"NjzZ%l~DG:zy_NqY]yʯ.K[vokT9]AeдxwZh3_~^Ygg:+V؋Nk;b}G;,e?BYdy#DTU=Gep\UaUu  6P EֺfWu*wnB;Z_0-Xi5`Sf&GB,_9;/W ;0‚RwjG}Pj:)҈'`Ań 0NIN<,S9UV)-t,Sn2( Q#\ \!\/&UOqgWjϡW"J!?[S>SOWwrK> Rw+puߡ\!`mdbn>t+DibzpaJpbL(\'Wcsbzp|hAj盞?뿣97&FurXOekNټl#]|uŃfӖ,ЀI=МUU>neWa9G癮;:^̓5cl 밾@w˜ rG!Z}:,-iVˏ^k1BU8X#C#э򏓓>.n=n-ύ1ni cP # 5:>3'm:*qmo ct%]؏^}7>"^\ϊ-x- E^la?ߙ|sB)>\bm'vMq .*>C->}늳 8Qg#{l{ N4E*2{DO o8CNg/l 5ils|cʾnizb~Һ@tfcbl~_4>wQobq) t[ՋOXKd-fDnJzg=O`#)ܠ"ୠs"(>[,?Vi?SѶ8;Yc6ZUz{kO}^.؛ʾ82oXlYt5QUCV[oL4oPOSm}:SW'}*h4z l s5 }~ܶ bmw}5}7δ݊Rr\-%>K? 
g"-:unL eouޕq뿲Ȑ]#c@~>ɍ=f]Cqh- Ku|U]]gNH0hև6-[Z9|N|*>Kק^GL>uwo"ؼzk.~;ySx) 1Icjٍ=E@QzBz*SoHċbҚsWMPj^0Vr*[T90 9OB`؁{2SZqf!5LH+"g^V?n|R1ڥDZPxcCAQν2Q[ѲĴF(|n9+zS^.o}4]B6ÖS泳ud/ߪW~khnbc7vU;mnd*2)8[rTmӖݫ |u{ӗnz8U$sp\Ʃ9п0Qi%D YE†Y*(@@NDbt fū&|?.Yfb Z  dw63Vd_hHHFo^w_y$KsO'Q[X.[/l8b9z@ f*(mST6lMC/e"HLIX(@IjKRȽt."b/H R-9*"d2 ѹ2ϕI[ ]]yGjiڙɭHw.>ǿgaB'g#jlFjzM+D6ꈮhoשB5fӦlGvuՑ2WX^j[_m{H>G1l<uve^ h 㥌D StaZz~caR_HW6Omx~;q[sM'%"y=JAh=KOʤyQ84~M5Y_?} m{8Cg<g`w2ק%2\zN#焃Z k=ܝUB}5BQoIe?LᠯVh}7v&f0 -'daP`S2!9}`= ki{RZY6q4xMzr'#Q_oJCѬ?W3f?j_Al}~,D#[d0]zI8x^ǖے;.iNe :Bi;֊JYYE;wABlT"N+e,'3%`yxy8'A| {I鏤{$(A+Ϥ XPp2-)X={|z=Te$S!1\@JLsoDt< 7D˖2vh_>2F, aL)C@+B=IK ǰ0BiGZ8 {둌zLe l aۂVQ}8yin @R_P B0/IVdB $_az:cKBw$Zr:sWiD̈́qR& |]~σd<k=Jw6ýJ.^qj(jfi;J 'N&\o*D&+BF&-^oG?U[Wqrupe;^O[_zHGZyȶ}GX& Z\iTSDa,i R=Ԯm0O`裱&I#o-@dŀ!cq3rnw+y$=n8Xу;16 &a6L?o?:?Pb;rUh*M۩[֖!_~M_/YBX,wAȑ~v}8grbӭBGqk;Q8Um,*"%9u{|H=&8{4y;;oA=i΃(z =vT 4l4$xM:'e4Y';#rSH0U(L@QfL['5 QyrF$0sَcgqd](t\郠a{aaZ_j;_U>Se~J&.!rAkMXIV)QS)5ю@#DW+tٖmn͎@tCA$KΠBFg}e *DZ+;FsF/iU< ͮ7ϵʭ7IPB;H4l@va%vMӬA>^5wWIc`VYNP#V>3^ JFs)./{D ޱQx:($ we)M(jlKB2nw;j)DM^$D>okh/P^"vDYthN}E mr;@^̑C_b3L`ƹ"H Bϵ'Li%-!hùKng\I^<$F$ g c\$ ꐣ mXdJ:3DzR'Ԟ+xFnIZx&%mI0gEr9wUS4vޔİ":m􊐠BǼ" SJlVpnDb$ igXB"Х\^EZ0IL0,N% dWh"P.{ Q_o. ȣ҉1LF2PRx>SL:p&1Q2WNψ@ zW(^'~'vwC6FdΗ%bs$` 43IaXɽ{i."B2%&zE&|w@9+uڤ#Ech5(H. C1JʩRDxt'+0VU_3yt`ҁ_cb6m;葪~Z<{(`_qLH'c(P\VϚ#eEkT<@*v| ep)#T5HTko&VՋ$gZI,ɝp 49֓Ղg&roM4!%p! ºY.:6#Y!Y f"w]a3rnO#! ]mh>U#y泒{CyYU_/_nZ@DޜAl >  y;s]Kff2:9`+`akBGO ߠ{pK:2lekax9-[Ė"C.Vy\ (T4נ_ԠR~^hoY=#~ԍ#o#WmjI;Z+jϣӸ=ch=l;\\`^0O,Tj*{zNߝMӧvnW[5ʷhj`bΔN՝67u%*w2Jyqxջn$pީ[r%O..Z_֭W uWҎr#py_ V_нBkk݅Q d\MG[yuH MMQkvwa!ېrdy2dw$f٨SiCr}ѭG~m4uL.,h@à[b?]Cɽ(\Do2efÖلN:쮱)7NƳ u.'] W]=g%N2cOu6(*2qXVbY[Ej͍¾a[ŃNM_1캏ӵrg'o/ yÓd E Y̳(ɴM<$Ecl?T%ZGi*1KcգO5?vE^xXuȧwR98~pnM]?\7xy+A"gG%NQ5~.DI4V'XCPppzaD6J,`ϊ$:Y MK+|q7&j<# ZJM_ʡn'KI,LɓmJ#rQhu#'nW%ne[͜-CxDRߺlo!/C:"FL‚n4@3YeDS B]L[sE3 ېd6@K^~ɉCBOD ` reZ3*Ojlu3hDKӴOqӫVmJϧWm‘\iaݷ +y&7k]Uгt[D@ \!:fE )r(}κ@T{c-sW(sě6AГ %Et)Wx1?HFjlFz\Vbz½{\@/ 3\\l&oθ9w~>t~6刭"Q1)NXu+CI;)Q9*#%%mB"0"@Fj"\ P#(zQ 5A0B6U9̂6C2g3b8HCAjcGV]7%)0mQZ kn E%1-O LO%dEY*'r$ išXP" G(H2Iڙ׫[Q(x(Xm|슈."v 8 b_2Б L䘤pX*%ĤΠ%i:*I2Hu)D6+!j>L*)!HI iAٌNu'\-i(2.\\9+J  a ]8CNd"CQp-XAĉEa\. V⡬r7<m88ծw5;=.(Zg-~4+e>db^F7 sj4m|,k$*|@$m~;IۘyD}8h ԧ \X )ɴԅi|U}x(Uy{72SSGH&ӫGlݻ(Cqǘ̲'ljϣaQVRyCӜEkb@q| %ZZJ"DDgy*g0&9sA˸`ѐcr|hdAkA߷7~}n4.c4:GNx4r"X6=65r}ݻ}]蛟ǓO}b}!{G"+#"+=ַtaYr3iפ'@W1B z,כ[+AvͶپ ?(gR}$X2ctKi*cdHj=U1䤅狤Fo3# TB{ޏ]%-F١qgGN/c]8ZSb+uN.1 f+ fkOQbq68kȇ󁏠G½:fuv0$a$H^]tizBxi"7'ANE"&Er"RfDaɾ|{8Av_u3ۘ;IX& S"a4Y$Dg!2r6i8D&.]O)@!.%755Y;I$մyU1G=X{V{(gN3FQt` "iyjGV!<O1Gm#uPɜg$ b`dБ(9 m7q8ɂ/|PcB(uɑf DP%R'Xѻl5"/o.g;#|a.Jm/w9?]YaOI>hNZt2fv!L;# B)o,^Q`pd!!d9JE\fo*YΕDggOgqAI9g<+d __;͇!Snpa_S,BfAY> -=' bhELZ6)DЂ'Cx0(ՠ^91Ne1Ԙ1)"oH'Fs $Q<8d*Wiu-Vȵ&}j=: WK5AC.?+ntt(zjlCe+35 dC%pv]~QM͕w"yP_}˫7uny1U[ӳ|C㸷%/XS_<4_6_P4lUx{)[Bfd.D,;zW#c˘TH} Z^KZyY`gVK0'eɣ Yrʹ+et>1Y.s .Y^(]{F\Z:6J1vzH R pp=Lx.2fт4˩G&R. XQLrJJ Ԫﻜ`E>N3V ٻF57IG`Ó4M'< G><$1qvJ# X&4r^+SO|TT:A\meRќ X1 P նuA=iW+k)+lmorYO|gUvm%r#p%,z9!;~g`J>O\r4g=)z*U=[X\W"ג+KW?#]d YPpZrLWz*M]"Zp MW(Tpj:P=NWS,+U*B/}BNWW-r*^ZU狟*`rNURCت*bMUܢbM1 89<Ӛ@^suL'h+at`>EFb`AB+T:+TٵW+K)) S4xWQ%=>\^寱S'ش=`WK&-/JѱY{UWsRe8+&UK *\Zκ+T)lفk '+ Uv-ի3.MW XWM j*R՝ JwOW)HBF+P{- Urq%9ܩjW(%]Z 4)JYq`D-~-`|1ͧW7RJc^_z;˷?&_;P;;丌N_a;=+0&fQ.)K’π\)c3ٿjsopOAƃyP22<>2,AyۙS. 
?l/V4smG4d*Xez|[ :jz1 VX\LVhK0c7 &{pi2ZS˅/ oU/GTяaFNݮlwZSUmSӆ|g|5.$3:sS&9aiN>ͩAr*m2G gɜYY׿NUʺrz_y8%vl ͈P6ff xE`rB@oU4+P:AʪX΀ƨA|`˦pry2O9WRQqeQ&+l^!:)\n{*Lv>zTr["* NW BvWRW EGDӁs mk%גWSZU=c5p{\=詤фp r P eYrś|?_7ƹ7Mg5?PC( %ڦv?swu+wDQGkԊ\E*g$'D("!*zޯ&N[J.__B}ֵ3^ *+ChFm#-.=Ζk`gkRaJܥzqMϷoB\9HėVVL92Qx呰+bsŸoRڑi}E,DJy}v5~u9;!C?-qwܗi^Τ<4O>C3HM'߼"7Vh)REl~jc;\BBZ*Gd$"ϩ^{. g4Z|ՒMFcu!st+j`Kv"']_8-Cټ̮ňdP), I @c(<|*΃̓-!1͂\\PNSmr(\5fb㋭W`K5A)`x7ӼLUZ槵"jVx tvum}DPeVQȠMdQ_O\(2sϗloz'ytevM aKMR\q.P*x!-\"&<:bp5!U Z6Bzor*Oe;1 P@Gپ 4Y?߀Nl[͉4'%BiSA*e%Qg(B(b՜U]2֣7ge#̈25tjL.&l晣+T0t ZJ"=4THh%͹"Ȕd5T%qH FG#X('*,.&3}l"|@"8neVe,t=U񼸜MN:xi8T)˥3eLhu4R& ¤C"RGoHnl{vuJkDVR c!1jhp T@Š+5K}cecWjGc!AذnzOf'd#摝'}yUf:O;OCu4.p;M +$4~`М(ǃM)a|۠?@Q^L|&W_0HLexjuDj11i~%&.]$~.x>HbK(bflSǰ\rXtOERg\^LLOT #MT{rȦ1S: M gF9  ?x8 7 &|~&d'լlu70cmKa/h*~$mچFF/Д&\@s1DXbF-Vz]h QD9>0|+ϩZz(=@yT̯zR ޟ`aC۟\r>{Wyl~Bh썇zϝ击Ԙ|7O>k\bq5;[bV˳{{6G.'<-{rrV?,]M9b;]&=Ќ^AMiRR+K]XeeM\ m#8y&Uu>fX/>?AC}*]|^Ve=e=nvu*~N9]ƶQ7?xgYa^Rv7Rϴ*('1EC3!SI8]YTW SZZv,"ZIRXa$s)Qh|j"ˋP[[ot^KIܺI2Q[UQD%,)hN1AVAy<=/ nCqg>~ul;{ϊ2(Ӽ|.fK.&c]8BzQƀZAtAC}cNgܟ4#>dcƇl9Oo7=+q{x>yAcѪHj2 9(yӭʍsNTNiaEtR5Ґ sL Eז p}bN x " 4&鱀Ҋ+2hH?|,.  lt?M>{\m{<tζgFbloGxš?תWmfm ̍jd GDTGV Aq|~cW}Ԧ.iLn|,*S6g9R/E'ЁI A٪;Xb}-p`hPEY.  !J.U\-fKKӶjj|t2W)&qq (ZBTCp5jVfL*T`ijDO'Xӝ!6oo#_AtzeR<=.q+J7|AeӳGdTc N,Š7 ъ+v.6]laõt=.g׫4]3$m)~VD\02Ă.(BlT%H hc Djk7AFiy)^F(J2eHF{FC9SL2WݜaiUf佳O1lyv6߰o3íl}M=^yߋqSCt)ZL>d R0`b(7$%0ֻBĸߎ xb|-3R]&+FY/Zkfl׌J3]lf }c]F]xpK[s.۳Y.n|1Ͼ-~rmQe% J s'kQz[ze3TAgVfmE{]=$%tJWq% 9ȱЂ!ؒrѢ[]cٔQcfǞZ[Z;Z{D݊9/" NɐmH0>Y1 5U[b"3dFE'"YO#(#1Iuv:y3qÞԯ,b<l~e[oԈF ImpZ'KIF!@V6 zT(qɬU0x)u82&T=)&f+ξX4ֈٮfzq?fd_7V)dZJy/B~*  *"8 R~ԋЋǢ͎}i~*lSѫ#kwl4w0i#ds~+lh4ae" ֠ZuvBM$?;y2wtX%*Ga! dMhC!IkV':]L"yh U<(T(&QJ_m oX`D&KkJ]z':~uql٣j5x _lrm͇1i 6ׇ.ČxmNw $<:8UJ?&< O !z'UΧӦgU' mȴ`;2Q}^"}΄."B(LS̜4Z ё$鵩1)dZ2#Ic+ H[XH21եufl1JlƆ} u(Cg7= L퍘ZRAh.:o"@I`BwB_I4dJe $By72ZE{!=gK>8IT$5oڤC6o0[p&IY%Md>@t)@!i;]h=7K=eq{x\4O5ok'糺 iN\(@74<6wrc~96~ξMFOqϋw7áf;?X+p=ף)ODd@{픾w*[pݪۖ;#{CZ:7aR򊨉-Ïji<#N&f\&1]{[8;odL ࣥԯn1uâ~y;XigEK)#cb4([ V!^gI$H@2x905)'c馆ƤW99~Ti2AD>jmLpEP8;[mzDfBួZ䌩ьvW<$%5sUջxJϑ}3xJO㢮1R -.iT2$1=T"٨ !3r> j(}hÔjd#u`f8aAq"MEy}&LDnv㦊f8$wxvi~orCW0a^1xi2i3xtҮXTv1PmxWd֮Ts՚q m"[0GeZPC'x61F z/0ZwqM}ySǮԥM1G"I R6v326]]tuDiqMGfv&DJF[UMÓSd5e9QTҕɔAX!%EosxcqZ8oH޹?_ϮWB쾱*kzSH^.C _8LC6shGS+r"y%AͲ}X Ҕ#6;}&Pb|(S$UIuεfl_O⬟`g2drݮlyS,3R Y;TU X,`ڀ9ZQ 2_TLЪS>? s#sԹk.sn.wj˯nx=]bk7۩n"W{X5=$߈-]VWg畑k3O88~;_)}g^vB?-&8o4ڸL]?KJh/?4F;;Vڒ[ʭ\E%./ŗK`m=Oj±___ֱ HMX()ij?{W؍1_vۓ腒v ؋vH4Nq2b_$v^Μ}-JH|z@\x>߽#9B)M mTT[Elt[Gdtxom% N?Jh7d,k(&yHv2Ӧ=tr~]X_( P40T4u4b jtRTX +୘u~IǗ ƶ==꺫-f%A%P֞L[}'cȁ![m8goq1RyylQ(A`HG4V(Lh+VJ~R:o3Y_2؂d/=PҦ%!L}FZ^&JN8#rRфؾ+IJnihUs,[H %DV!d0Ye`jRe^FQ<^J5CXʫW{,XлSݥтxt٭ ~0 <] 4;)99G4~hLR{N9Q DvjP()jL|pVI;a4'+|ɵ$eHBXpm4. 
%u;Jmx~rP^9iӊ9&^*|_v/Nvj@&,g_~w҃!kjJaљMV ß./Z}f[bH,KGxeè0.fYޱRWbOLzt5ShݣN'l%sE!UuTϘTFmrePƮܑ!ԯRgR<ˎ˃ttvVGwo?pwƒq+pSDO&O"`NY>[C#] Mmt9ɂ?d\唗{[|Fj٣4r[߿ҷG1!=W&{GzH~f1?i-UItB A@fzd}О)l/CzkϲvͿ?nMp?S:YJ%]F4{,Һʤ B>gmiS)pکhT;Ej#xHB9# #/g(Bî]#tE>4tvz|6ϗ=?b+.Di2>+%kCH!NEkg,xNw9M^R6GR2,(Y p6QJHX+dxkɄH6V@aSxzF fd)tL8zTX-3 TĆy Őt!zA9\YMmϨIkc֚xCd`\R9ԑBWJ&Bz :ꮎ~VښZ,/tzk4BFcjr+ΧƘhvdҰ 0MhT=]e[>INbz)1įsvI{4JIV$Pm iT F' ^,dE@_)z40Y M}1v茜- 5~zQl4!Z3Mb%P(G%>ks ͱ^:Rh[Gs8`J˸9;8Ta$8.Rw;j@EĔY%Gj5){i=oJ@x%t-ێրd sފLi':Q*+2Ypʔ|P伒;Lt%Q5fj-7u}Z*n,d!*ɫjD/dT>jF`'띢mD7XxUa9>ܽKCz5uJtCEϘmzgiև-Rs=jc$gksm:+G1az_b|+`8O>cAH'cbG PZzW^}[TQ Ǩ-Z6R1\MlT[` U%|uRnT|KTW& Bm*-Rk6ڵRWW_-RWL0n3XgR`w_=!};MI+_R]=\+_P]=ZHBuh*&YW齽ZKei ڔ ,*[Ŗ75oW"`zԞGEu#2}/ H1j7s3z5[T\fզW߿p:I u^|tq8?* ?Jk洔urc׿,P~}\N>H$Wg=^鵕ҎN-ՌmM( o>GsS2 xŒM47E2O)[!5W+Sf&_[w{]X#Nrj69;?ǻH ق3s&Q0Z4/xR k59 -M}lkpREųL)X6F*MHڇLTԑ/ mbLR"(="btO9.]tFVM-Ihq+/ҝoF̙gwf~n}u6y[ >YK +Ѡer=¶d5z= F|wمI _UϊL)X_[(/HMox#5q 7R[l؃*0؂Y2E8$YDm)B2oפtrP3mڜ~5s$E/bAe"O!V2ľB'\|"t+ G|퇓DQ Tȡ^cu#) {H.Ix۱tFco͝|]J *+$)C hld5vLD1*CX.MKbE/Y٤,5Q)d4f>I,.3j mvV M<;lx]z(YZ&XG}_{M^|=JZ] ->LRaZ6즗3Z| ,gVP\ m>[좳L1 vCcPQPLѷ{{%3)QcFHh-lb/0&R>eb ry ϫ_ZuaZy>SWmn"FڦR,0 =N0j(C:CTR P˾Q/Ľ$DZ&iJK|-Rhepv5+Zw_JH\C:=b"W7ޟ Nx8(MM4n0 IZYO̦bkʶYV2JNQ0l9QTOOɔAXv0@J r};s§+ّoѧ NP^ {{8C0TB4.MF TDB%AͲی)c6{)A 7)`%f &%Sdu`*I%3rv+Ӌ,uavmkn9Ж/)Z,_ȳT  ?i_ ;(|XJ6i|![p|{_h{q=Gv %*dQR1eKJY:"$b Rda# Kwɵ.琳5jLJ!F/TEyW.$+mHY/Ƣ=mx`=E*J:xH,^*I!V22*3"2"3 )Fs+%]'vΖc n2n4`d e8.Yk9)XzY2jz]1 ybvcRoI392t oOz<4X!mZ֊eq>1ZO:QP\L>^%yhCSj< @tf}$3Y9kiݞ}@yx3:o -C 5OoyHc$yZ ħ]x53l^iSש˗;|@(˜Ys#(:Dh/\d(e=.`;(\ ibEcA8kCX@9 J#86v(joFI**6>P-yA)ıH1WOpׇ<]9v"MMÁ; _;DBDS,t: esLvQ f4BS51/CyKM׿9/v!5p4!R LueE8%6Ph&Mg:_P oLh268. å;e*"S6#E0kD4V tX\W=ɡbPO!!uiÎ18b,$Iy9(M01xut.y%}alہ/A 2]4vHၱrmP&qx>1HȖy5a`p{*Ob VZar3&M5d[c~b+t@Hmxc.*5XBLn tfoans 9mzZh "n.0o\{腼#1t|3ZH}]&=|24˄*c.W.K0L9#0ɖ1 fTza@x/@P>,8"rZ`Sm%Q$ZG$ oDFF[$[^bP^] hroZR:g'˹,0mBY[{J=0%Sj$8vBK#=Y7HI3?c}y%>^L.ѓTu}1KUǮ/J~D_M~׎z 9$(b"FȜ C9qIbrg:rgOm9RD::A&~4Ep]*2a4 Qy]̚ϓѱeZQa)!K*"T$>: H6MyaI(S=Hc2H1 F|bd!6kE擬·'_4w3gݱ=?dzoǭ[ ŊT8usٹyɋ8h "O0?B`]7`iȗH/E)R!viC0EhΤ01Eûb ( u^?kUlTE-6`S!atֵR*]6p_ę/IhXoT:30BE&4"erھk~[L999^Nrsr`F >ZA FP٫]_a8#\}Q;խyZ_-'ﮧpьK`jlH6~NF|to60!ukO鲭 jk7ևѸO0i*> .}.5/Gh할6/k˻*9ѳ^Is+H_!,c|q?b Q2`T=`r'0G_>Ӈw_/ &?;Xq#0 6}4 ??c;wZw5tMܵ@˷|~eC  fn DioFAQ4j'ejBDWK_END1#Ay % NQ⋵Ѳ7S_AF+:ZBd4TiʼniB9r'=QYq6Vc7!n8x f ckڢe@@=h$qSt:9)zr(e;C+[qEwҳnU2i;Re?uG?BX<]N3O?|8n~Zngå L $^2IEbKr2囓o k"MUS+t}i8mٰOfkkci:b巿+"~R?ix}654<&cjTT:սΟf~5>@;f΃Z/OfL's6a@f% Ƙcj2F@{ɱ6$ϵ,wJÃ&c`J?L]v0=ϮGpޖisj 0֜=Njl?]l;h]sLh z69|%q/t| eaM]roPGGYmw!mS_?F%Jܼ9Ut K3m/4- .o^Hǒ3Zڀ6.]Je"t. Tga5IQN)93Z+}&zG64u꽬^z띮{Nu8zeNb{Z/ia%%#҄,a:O9!]\D&l)9cZ&*3#A{˯V-k|ZB"Rp4Sjl=hII$Ycjb ۜ 9&r6@:2,"1rbrH!]ct ~ k,yscM1Z('vXWCw1_ńE ATΦ;ig3"+ #:孥 WlCv6w滇d5|ꭽgpr~ w+q\#M$T iđU&cpţcFG-C~uÁ]X d3+@>A)ˢq]Ji'xG%C90ya}dEN,|=@3uےF0*!1Z%5 ȫ tyisf06BKe!h8 %FmFx=-"L0m21IbbE[Gm5{M e+\pdR ie6Bo'놽OirZL6 Z(Gh%N^~UXf43`yf#,<.Aa|)+ tk>#kCs}ᨹ>0tHRW!C҆d!=tP BŷY_ftJ0Gg Eme׫N̔+Awbo5$KnMGlaIbgv|a61vuv|=rMgQv-#:HY s\7`k114J DjbZX Vt84P^hGs}Xҹ1g :"*% .<LCm!^x! 
X)WW8 fXU"?`D+P| Z(,B0`|8@]s.45N!9 m&6ݹRKqd`\—Ý:2ٻFrWJɢAp$Cr){X,r;'[/ǖ-K-cVwuSd(Q)ԑRK,SOH]He&ZmLxa}HkvrmŇ?i@,զQv,tɰ"f.A(!(d}B{HmS2QZS65ndrPI )m4"H EƆL 5݄ii0ҥCRɱ{W/}њ6ɳ{xuɚo{G7+tkC?zMQyB6S4| ֮}h$I,hRL'$ehKa,Bļx@bCONzK>* .VY/ R8#c; iƶX XHe uek9ge㵗~?; Ogӫo-13* c~%  NEe={WT^gQP@JHKrʓcЂCmIuVCaoH(tNl d 8 RP2k?Vm c]H)P{IgXCY$d82F!"S]8a/,*0 "6ӏmQEDq] JHj:YJP6*@Ym%m0K"k: >(7:Ɂ*g"ńm5IH LcDl&]桎E!u6ӒmqpqMYdR%(h2;Jl+&N\<DHA(XqPD{kk"²]OrW U@L f*rhƷyLZ+ gNZ jP('hr0v$cU,lkAubM|} ,[APD-ظoGGWwYظ*;8 b8.Z߄E^_]kE?i8r&VWf1 ,"߉[(t^pv]N9s0y@jry^\71,/vsZH~h"Us~Mߟ]T|W>` jQaWxsbʪ؝w^A0'(s_t|o`^~4ODv{v'EA{[hT_S2b]P:HwDlj]]0{Vy ۘt6^*]Ԭ&THLBMb; v2 ]SQ]{0pD \;RuG㻰LwR*1.ߡbmG~ٿoOG?,-J'h6`S0So~a4_N%~<87ߑ#%O;5X썽/xYZ'štRCFa l=*{,pUݷ;\U)W#\yreM p4pU5XJR'UreJn8zqzu/̮&W/WORZ|\zyk $ \Uq:[v*3\}pA,aW}yL2LU-`/IM嘦+!ͳFpBWQ)x݉"lSʻz:fyVv;vo]lq7·>]8 w1_[HS>;ݸ<|{Cыu:O(?|r'e^߽JAv+U]bvAPkKu SM򤙽__s\NGV>gqDyQm^>ó]&UgQ^F/>P~66?]XVeB4;9zOVًu#XUw N?mt;iWyJ׈Ywi̪[JLU}5o< r%'&=YShS -1R̉נl{hU,MC(?%#ͫ~=F+](?Ƕ][1ykV6HWrB!WQ$7!::@m} H-~$9ru>p ͼ yMڶ>Dh\ZV'p5>;9<¼kƐ3B]gU' Jk!(DW-P,IJ$GTT+gE5LY(tHJ,|nʚyHEG{vS~Oo%6c`..&? 煨w"Q^xz݁VdD$U:mtL.SA^3V|@^_P(STKI:@[ 5)'ܶIT9rf|R&FfWRV!OHKfuVlpDC%bo#(z B$.j%k[!bv 4*W'@ O2yX*l)!3rX]E>!a#66f<@ja |C\ VjVY!IyްBl)d=YkBDCN2>17_Vy&9V@ ϔD!bB7LBM.fgE_W7;v~)rUx 6_h ך; |3%gYSRW=[O,b@g;;AwJawrI\w'NLΤJDj[q! /8&&19(vE;C $vym *@ľPRfZ{z`WaϴF'U?rVlԁ(PTRǔ+sm='b:-ǯrx#;EN(jtK )"1 m:z# 0^{<Ρuu^=LfKf8gZ2un7Mw^R7u σ[,ztKMUׇK8ټ~tp'W<..~ޅ.vNl3L%Nc,eN:52g܈9 &3e`%eȲe}vP\`ĈZE̓ǂpl9׆jor3XX"hBXZW B>xh؄8. mC!cñ#2\*0XK"2 )X5&roârg9Ԍ4SuHnc{c ' %c_>JRW2v@2'yՉl^z.5h+m i;PPF }!2ҁY`7Jc1rmP&qxޓOI90p˖y5a``p{*Ob ZZar=&M]1T81F: lp6HFj 4Y1[,&7Vَw:#gFs_WcS09z}$9mzZh 5/W\.=:z&t$;2GK52'C3 Lc`sqi0 -}ΈŹuTeLZu5@>}z Ll,8RLJ-PA{e0究(r#ǔIFF>H|z/8˵\[/Z>RJGsqK&F!)S*i-! -EV#Z2vm;0/S|7Q%GqgYM~^YR)L'U^H9fJ3G+08E82: ^;d-N0@4:-$fE-ɾLOI$?' n_y<,BijwZ/;R0@[TF1|)' Ĵ^T50ٳ{hJLj92JS06A1O,7{h/H }ˑ43 -8B4rQʧ ๠2H1 F|rd6ZkE7',wg2;3Lc'1z |Oǭ[ Śf8 s܌<|ɋ8h "O0?B`ͳ` ,(w/3iϧHV` D0 ::‡qG(¥3PGa^S\8Mu!X֓Ksr ֎3hX|T<30C_ݩpMLDg{srr< "}::r;!AG@t6 _:qG.`Tz7lYu>/0`Q8j#AXRpg迿iuWCxTC ĺ|zwWv-JcvKn)@~~7(HlyZ&Xă7 UAoS^ Qs:(z/@/o!@U'wžhl$<_}19"J+NH15Usp{QYqm7!n8xE* m E!*ʨD{dBP-x?"E:xӑ@\)Uo![t㫡/ Y:/a͠]Z DGY4Lr۫zۧ?GQ |ICؾiw")~1ي2F^B˰[83>Ԗݥet%wRCgTzHryf^.ܢLrJ͙"<ȘaL8uਵ4ª߫GY=ڊ6]n.p8}akys!-#iJO'% kpO_ \'bkmSr@zM7UHYjp)>]E\)\7"X`<TIq5 #'_~a26paT  䢤L Q`r/K9+׈rI'p%p!,~YpoMW1Y{j]&y>-kVqX}a(uCu\]*~[.lYI}` |n6)o3騨 4'j/}U29z??=%`3uniH^tr}vMλD&l)9cZ&*3#A _kZת=ZDi81,٨-thII$Ycjb [ 9&r6HEudXDcZ) +/B"9}2@C+5TE49mHYi}-s{-]Z;+.-gB?UE/Gu6Ms͈D,d2ԃͯlC{Jd5|쓈'?݃EDh&* Q؜F9E-20QtHZ_71 ɬi9eS)OjUiĈ!k Y@ h$85b9kGk6CA1 '=<灐TZv7V(!b$JIyXqb[`YoILKT'};-tRw@9[(y=K ĊQ':ϵqG%hm$أLy m0h0+nx:;=hU=ym7)W#g#@E. 
1 0H%υH[qLl:PK1yl"O";  [KWVKXq,ZL[esSmWJY)8)ئR3Xd[1!8#UBroկ&YhM_#n/M&lj+_뙿Z]G)bw; DƏ&<ϼ$ZU,ask,S7r* lvۿySn' }1.\)<(~`|<4lD>5JoC,o4JؘZlţ}ې D,04fGv8Hfhg50E [g<;zp^$%6qs8\-^- Gaj~1}=Z ϡrZfEb~b:$;6݄)Jm!Cf9ũNxYCJli6d` y)03v`HUָ:9:cr]M8/Ɠ`< @7j2D  b紌LQ&K /fbą`y` 6 G=,DXvp}Yĥ,7ne#ep5x(K*q&)UE9OEy巒F~iECҰ0NߍIᒡ!kcϯ#w׳;OVwODNWZѳHgIEoPdѹ_vՌPd; ՓpDto :\q2j=C[~ulNdJ`Zl 0^x5/e;tkg^P =;:ә}J-I6Ḁ̃,Y5M 3 0nJMn>kw jʆqy*^@a MՇ|$UzZ=X()9eVe-avZYVȘmra ;+G+Yavb%yu\={=/& [O.BGsbtO̥CG D^<YQ1ia".gRk#9Cmيkmv@+.ԉw 3CH@/쬃waGGD*.Ur>1EJ_*?I "4;I S?rvɱH+vX`jd,y86SlΕ@_=Fv6ګ0S>$%EKbZyyXOJNw:Z"$lGl&^̔!N*ƅ֔qC!~hf<>5Gϴ`͎_flcH7cr *C"+WZs FC+d_uC@%&I62UyK/2؋ OzSzDJ4GA~AG:j`:IG[{U}]i[XIRؤ-L[U%3%z]0?6dx+V 3D Ě,*Wqǹboa;#7^aOpNocDu%Q7V>tQF24˅5+L7̷b8z9Ԑ\T]$ȃ]OfEi` ǎ nË|3N85ahY;sUd6ݸFY,<@gqΈ^e?NwaV±ҝW]A6, g;MǷ-~ܩ9b:DH"6C&*d͡3.OM vEwI؂('5 X9qˣ %sN]nl;F wrhJ_6&;۾9l K(~xhh}TDZ_\D%SklƲ-iԽ;<&\@.dnlX ݘ=BIJV%Oj1CۿL~ ;L(}tN9{_pfԁvg[ 1[aǰ:UЅP.m,ۆ?yVdY8Ⱦ̦?X^#:vIBK8:ml_l\hSYIlϖ-h4lngհQmlDIo^l_.3UZT[m ><5}ЧFhqsN;xkkHȥ2~%=&\:IEdƈ9ٮ.݄bO׀K.i5Aą}|Ӓ%ʏx%gL b+4M_'~[ \pX(v_~X1Wr.ë4vtXjx#Ҏ.녧CHSqӠw J8V :%FνDwNWpR(s \fųYh €Q)͞C sӗ$q, "8nRXemO2iC!*hCwu szLU Ry!n)nkyOqܜֺʒTS-[õ&j)P"mGٻHL$ًI5J 7m?Eݻ DŽ\ZN1ӫ _$0Uim{WOVhG2gV!㸛/WvvqVmb/ OF&f3" xúۂe2k_IƸ(%=(VrqLL:>*v9 11!0ߛ`gM`,}`|ދIUq^ _"*BKV{]~:xm?XG".hSԵ 0`C]]2k@D*Vp,cu k0ֈOKgJX-HД \"эZqkaj E`ȼ޴mBQQp6?\M&`a6G.q8/Z0 $EhB[[ =%49c+z|kt<1[}s 0R|L'fvQD#5S0꾥9 #$ܵ v+Ԝ!,h9Qꥺ.^:t^q#}6F΅dQpe_ۀE3)\/ݐT buZ^BuX8JrpIln+M[CuN[SS;Dk} )[$ E vY;R)u=J U))O ^]F #⺛U1< if-ͪ,^'-!A]KٻSrAB9HF]"(|r9M$loԷe}䠷% w:DqCwɞ7/u*@g򉵠G+())L9ʶXWlAB_v ̝'&ebuy7O%5Zq4#d{Q{/~yV,Iikj]^ }W>E/e"!Thְq ':f4siKe]5Pi}0A͉dHcz j^8o -e pWCv-&R>@/Wf52ybb[HeKiD[KhӺ hoݠ玧 Nd~[-I-Rܳ91NUN.YSqo'UEjFO7Ϳ+~"d.^[HhL !C,,vGqB0 =ηP?=ӻP1d(ù?51Lo>F+t<\̣`$3 >߲"-A`Std?=?=IfWߜ9ȿMqH c0bLQ;O,ӇMLޯ㩁. 'ޖv8r>Dד 7Cx{?;?Ldzw9 ^gmXzj%ا( /_cȒ#q,)˔-Z˫Ԝ9΄-Qq$@ I=:Hv!pņ[yNgjq%oB @*6HO(-yejͮ꯳ ?pz2 %mq}X>d֟NO (w(g`&  a 80Є z8Ku@0w%ST`R#^PqJ%I(HHJeM-ϔ86P*DE{'S0爲vR -<Ń/6 +NI㼡m/ o 7238b#{/ nˁ\W̵ؾ(.7M I}GQ'F% Q['~_CJLe J|j+"JW!αkbӗtHp٥|,PSF;G nV0IO; [ n(lLLG"!"@=٥MA w&N[ |.=83ZfN/mFu+LYiSP3!Z*dn^9lČ=؀3=dцS{QC0H;NH$3\SĖ4h5tKjr٠%s3ua ߅Q!!uu(D٦/S>^S})Pul=Aa%f nv;aRgus1XB"6J$dQtuGQ~ ͤ]:=]4zwX;ݳGC}̸f^O ]*6*ַMR}dmi$w?u}{\"#)}w)m #*Mf]q3u-<2c8r.{:UtKkLur< hPl,If3#^x\ϣ(w&uo>VL@ 5?4s+sA:Mݷӫ`&Mn|Jp?GPNo|<{W hH㈣ !ǻ:䳌vֽe/Eԭ6\ļhJʍg*BEbyuFNv=qI܍vaŖkʝ_i`Gz3? Z0lnwBߓuoYu+ZFc}㳣/0?xeGד4A/3;GBCa\͜ϛ)eMqi{aNL)I8W=tJ-l}l L\/!o:¹#x8!k.4".6FSl#@Q] 19z:#mօ,NCG?g/ uQlf% Daga{YFK@5_6 .n#vyqjI#AC5o.PL`qWEo@2p~m?f@͝ ~-8jwaw;8Ё˰]aAkLwr HzMߑCzf~}y<<֐Г!lx0{շ[rFn_E>`|i)ɥ'/ʼ YK?.dJDʔ8G.z{"?ě =7׃2wP߫YPԢ行uKSmiRq5s9e(Stꎀ_? ,ؑ׳?vVِ* 6\NM2A_d 5HYpy|0 Wn̓s2N|%j#!ƒ&b=Q~.c#˂0;3[{*U]wIaak &ShZ :F +U# ! 
#.9<J [!Wcaݻ%/̆9 D3' T0R E ㄄F)Ġ/.[_Ak_"+g.7YuL1O6ۉotDu[G#LDpc؄;`!;ϯU_Szn7~90Ѕ*4-6и1j#Fvb1b+K`dp‡ nl },:6:MSӟZ Y*[KUU֪Ʀ_^O >p}]Y+s},5N{ZJ5*d6^rWCBx;2I oLw)ȑ6ѹVd糿q\,y7D~xwu"DoLOg3[Qs1VU)ϾN8taDɹ% : 3 WWW+!.& C-3*Td-#GX̉T@nlR^q;*|6Ӻj5!%;YE`W,-)˞qb5t]׵c Y11ӀJP܊UʦD"EJ=(CiM(9(l F@)[Zy] 6Ĉq֕2zmChSRiYH7;+w[#U6Q En 5&eDvdbAW5jC8eMe($nCȸGjH=2S1!IMTwe˱ߙ"W׀Ϗclܻ@;{@;G-j Ѐ]mDMr;#:]dA/,ӏ~dPw,J(sǪ5O\jusYW?՛g)&8`LfÄlw YFu'kgN>G#)N{MfvJl;%g Nkhă.Ɯ^IQz6"]3"!)cmҥ<ޮŜ.B*=&t 9v,]cqdvegI ;q en>L0pKK8w2Iuw]+:yݻ(o䩔SB skqq54ܠnP̷ :&Zqh'1THmFLrA!yQdy"i{UyƝ_)T5ЙG ax/kN@Rusav0St[W:Z  D .r*$G3kARh¨vY89B2q0t &X9.xI'(eQӗJԄ#^WC0r` x@͊-Vm]a&iSEfJH" lrZ;J!w43n~WADb¿fr0XL+HZY{(Bp4EG$Zɴ@ިwʥ`v"5 QTlmc8# hS^la6lfq2l }1K?_Ηsc?,1 Mp_%ٌ`iwZXlSOH`9;>,ܰH.95܃!<*?_xXlJ ,sBN;Xk7;N6؂˃üW'?R̎0D0#=ܚ|{2nyHpŜnvGe7m}cg@Uң$ıJ2abO1FXQMJsPk F("Q7G%J8FX2f97 &8 j+Kq)NԣY\\}68Z5}&|UA‹Jat  cdX j&# ǸsHx7D^l_(_I!}ްGx'K5p|6sR._mH3n!Xϒ_>߳/j=^!}]Sɘ rroۗ~dFwspGxljSC^A\c.\ƌTK0oa sTTuكnJ Cr 4#ZhKlF xhԦ!49&A^cZS0jH1.*!VHzbf{K{%Fwo4R fo1R1*:eX;\yN94B(^T[:iSz._ʥzxj.@kw8z pǧ@T+P~5EZ4/(~D[uC֖vP:>mxpʩIw5 [[% w 0kO=ŧHR!qJo޿Tk3%LULz+)@G}|]RfaZqa&2we-s}#4>ETڊ P*Y=Dh!`:de$qL*]7OrEI%8$~%Ng ΅ EoHF, FCT312\9laʰ ,Q!%*$2GW&$*XhҲ+E}Ą`( ֽGQ!Q!x0{(P'J!L f u:ZbA Z *6ؼAS=ݯꅝ gÕVL-XZ‚᧊ēw_0l6EKNS6ϕRU!P>7Jit".:hT \>.o%$0l c$}&J)P )A?>q7%p* I 5MinX I"cE|_B/EUKU֪Z4t{Q+R 3V@(Jwr"VHBBcCLJhrb|g|\|}ڽo˄:Ϗ?5:C,j9‰ʤN)% i:c!X+bw7&Wf%Xdԉ U"$v"ף JOſ~\=mUb"ۜkhxӫN *^A*qiDȠ?w 35Fl.טҜêCjx F?΀#;nSg\iPD2<[j ,ލ 0%zwԑ3>e!Uo=bs.9ok @-x㮡mM:G! s~.9;#J'|q7syE*ȰW Um윧c0 ZZۏqDm!tk:$꼱BsOLtN98c~3Hynk)LdH-d$8.|(9~g |g0X rcTZ9CQ)SZ4c[aGE3d0WOK0 _dHEZIb`"(+<#"l RT[1'VG o$MYܿHTovܖ,z?YneȨ Xǿ̍ &Di%ߒo߿|Qy|;иC~x ՛F}Ũϟy2On.[ϔ?31O{t ϻC !&`-}͟)! =V/fawSq>cMe"FOӅdϐa! V0o=bT2jNaܨPmŐҐsIMr3;˿;Z=8Ƶmy4f~5W\Xuuժ=Px?Ca ."\A-m ׊坒n/rK#393A3㈮1xyy5^v>NfWW1R75>£#jdBkVqt㳁غ΂ۙE @Z)FOwnBU+咶h](=9xkl)wT0_%_¿EE}oT4gR%mP:D8v5Cڜoz9@JztGKR&AawLBR?(^:|@&EnkQ$7zpn%m[=~ dzhڝv'$I@!T=nc%cgڥ\,_c6lnn?=|0KD+lVs BjU|& yZU±]]%pЪJfESVŗwݦX'jsdqx OwIF |Y|Jtc?^aSmN;}c[s@|i&m*GW*~5ēuN4N#6Al=\\g%\,cm=bx;"ۼl ֳZm&8 n|:XB^=ް|`YxӵCaSx~u,TπPfBE&/ݖ=Q CfXX(' JcC@c'@&.yԮvuJyPS/ߕ<ߛQ1=N\\$WH.cyXXk9 !]VtC_'O&f]+aRqҝg%^0<$^ch_ w#DbkV ;}]a FT1އwYj&AC$zrRK92a$5B'$S!z+yVQ=Z=r HIIv-өUHSLcd+Nvuq͊ix^iq1Cr%XyJ "h Bs0<>s`\qeAHdPB\D lӳ,]ϾNg%ESyF7j<'*Dю8g*LkiN9ʩ8î, Ëa[#VNk-jFOS$W-T9LgL茤47"&)* ɭ)fDeRm3Uw󲻂IAwW8`\a7sy%G5nSbpa ޜMK…(Q a@фٝ.F^IKU>:˹-\qƎ|BgQVIa6M~c[FSpͯ$xrdףa~ude^Vyfϋw y|VW,qq=f2Z׮Sp,_Key~9y9%"Igכ%kRMxEYA2J\(`H4oI#$4K` K-MThR 4ͫ?=O2]@~X|)0Z+J7x~ ْk˹ f:q>y:HKWIXJͮS=j[saO_5j&Y_X7C~tj*4o.kܾss'[̥G%JBT.6IB1;AMddQLQ€a)bc)IwwL̀Rb 2HZĔH{leb0N\eD%Y 뤯\2Dk3(}t r]s{;kPd_Z3ZW^p!v2YV:w&)qn+?53#$SC yzO_ (n*Ȓ]4.|^{ [$x  փOEf+zxp[380Ӏ>3j ^On5)̨1?:Z|ӣ{\'Q`ۺ]'>x=_'0ͭo )ނOvO[pna-:j )E/&מpv&6^x !͘i{77iv}=&Tz| Ljؽ{8vt sEPPu1.97]\2РOAVЍ8a ?%?A$L ._ޥ権)QI݃)`~=(I>Qz@"&̉;v'& u~+KQXUfݳO7dJ*E[B_-Me λ^< dHG 6pxNxcE1Z5d#'W yOXEC.'7K)%%p em\!D3hHRjyi41d Љy]řUbjn4yȸ%m)LĕV"9ncRjb']O܏73K)FQ;2kb m V:ck; }wFһ3 'zw#Dڒ0msaJFwg^4M/v]{?ѻ]ƺ^TɎ2oe=a=:Z~ywaʾgynaȁά_>2q+\#p .ú@s)S `H6#+葻ʺ?jȊ|Hn@8;'ԇlt"vs݀ v7`F;6(.Ph'ԧkiO9ן~"@b:fw.k8}0g;q~6=!%9Fe`{ol17|V)7YrR>ެhJ|JY珐wǯI _cǫ.K0@!F[Z ޛ&+'֋T=: ag0(e] DJkТ("FmQf%zmEr..$W]рvt_hp4.GxgL PPyfɛ$mHw &-\j>[B\FQI̡ܛ*AQnz[=B``é+Kb"gоn$.F*gW+pNb0VAجYZc ޮ1\]3 4;}Q؛K {kg=#A~nE)4c8sGJ|Mt`=yC?GvfuP 럕$mEE~*\Q:pYG.Q]XkE, (^jD.q@: sʹ_#Py^/kx~dxryvƛW aׁ~eݔB2IӲJшebu~] %S;PX@)^UcN3E#ƌ~Y РQؚGXuk0 k-Etc8/=%OsPw\&#1'<%VX6dۨhd1[V$ .@bdLnq6 &EǍWKl,ȒB gQRQKϋ w!FS.ݒFh7ʴrR*0ÌAr`ϭS;gқcjV-!eon|&7j08KKx1(Qfّ*B,sL8 dz >8Mg,;<5O@U=cn HrT`aYj$ Ԉ4oZL%MJL)6.D2fzA=fS 1Yɤ.|˚#/k9ɑ 0@DLX;aw&H~qiBׁ:.thVFbFܑ:GQ$AbRzo F%*k|V^G%EXo!6N]),1"J\HIҹ0FFO٣^PPWRv!p+UoАe"92UVafY?K0 `漿w}VP5eؤ2e-*mU%I3[68͕4.S 
W!+HGޖ{F`3K!JgyV|jѢ*j4):KV(|J(ΦQzrQY"@ȡ\HcWp!YeCL<m.Y#Q@02 g1r$E"gsf (BpAϲ݃8y9fD8s: #f&@':ѐ>hhY=I1A!K$"*V$AFgjv`#x 2nW=,j?ir=-Dߨ2Pa6EYLb{r_g$Az.8/'4Xqz>(-MMr:4JF]|;YGR]Gp vADk=s|(Qbm VQq\h4ٚ 4NRDS2{ybig22wTF;acۈE;1:.$D_# 7A @¨[5"fcy&h]'zN"3șBGL}^S|)г3ƙg [>,5_,uP+Z;wdbp,3wͺXrE/Vnu]ohNi僝'(G,}}l{>6_~xI+6|2C|{38`+/q4([U2;9Bf f xn2 8:٠ `"}ζ`;ڥ 1[$|p "ofh7Phȷ)/j\tB aXF)^kY$#D.~iG:˨^ֿt&_GPZ_7&.t3A-ʾZ]ֲﲖ}Mll-ʡR0|2 3> ,q:Ӊsdquju l-h4Hӟdh/eD^,aN9>@V5>x=aeJ笉*kGO“gWHOc? b ΢ف @-mUXzppGaLVU 438dUݶg}]xKL54$Lo?YRsnT߉%TKvl],u#k Z}7e}#p sD1SnW=.+, %%FU&I1(a$)5}wr $ͷO2\H1O]]qd.kPZy+q\o{HH7e MmmfWYfH|'^}Sx_$Ҫvm8JDx>=nû#vBIPz{Κ61NB"3>VV+YcdA:Uc7JގJ)e Z-os1lS‚Yߘ١E?{W㸍_{2|2e0%hP$=ߏݶlKeYEybxpxsЅ13Goiޯ텵yd g{g?9h ren0{`~ Yk務fɳ~yZR5>-hz|Z]9Z=?Fp)67oFhG-%3'?zZzhMx|l?[c1޼cBv7~W:)A G،ub$E8)HĴHu눷7_]>M[fnڠr.8F@tJWTA ޡ>Hԛ띗}C~}v^8MHa `$W3CN$XK)a5 V!V znGwv.J?ۘ bWդF+FXHH"RR '*S‘!L4e B+5Z'Lz/'xdNDќ餖7 ')ù[ Z*lڃ:褂ilbhBt"IhŬrbp T@-ϭII=<% Ϻ:XzO,XVeH 0٦9H89Cggn90|n~ y-'@95~ ߘzd?l/buYRaS"F)HYk >ҶN 9nT ^cܥ;E@ SY?>_^M,}̦gyLހ ow˨y<;yy^ .Қ!nu.aMl5lhnGkh?7C:p@8o^Lo6nKwƲV? +Sf!61$Bc<VI-n۹4FjjGd:W)^nv:&r9 ׳&O:دF u3ɍQ8HZKc3빿e\̅"SݪNzZE!?=*L68),}7(l[jNknv7#DQr7TA,M`+bP'-@-/-)IRtZi%N^8u%*ʭ%I7P-RFKu LS`pj@=( ׻N\] " %K n[>sJ-vLRDŽ$|PJXPKw 6)0D)rTS :g-R,o4&JKfOsZ pKgތ>dlp!{AۚGjΪ!3σgkzG8 Hd2ϓ|ᓁVucaBq!Hd|Zk/6g'bזoFMX=c{}lM]ur6# 9E(Wt~> eC!_8x&ʥق.?50o_fIk$ա'wã}~G(a2۴)@kɵ("ڲ~nys\$=[.{ HI #ww'okb=ڟ}ܬw oΝ NG?G%D=F/{P-M~HYߺ6q,5u;Tf1=Qw=VHQ֩; $#<ۤo|n=zlQmGD֡@GcGoGz45|[SfS\st(q3\gq9N38_z7{uZw{<*t8l;Fz]=P lHB"z$S"P-֗DJ30;z4p/حnj\N}#isRsbB@[9QiGr@'E7S_-ynjGdp/SKݳZM'ݶ;!v`Կx0yrꨡ]wʶȗltZUOx/}ө[ :[$Hǿ~8. !9~ǧl6`wsǙӅA*USi#S&B[O٘f݇pZ.'vؑ܋&Q"$= aYK])=ڶaC;9"syPŐ xLd$Mհ i#)PO\ 1z(Ȇ7v_v!_sI=^:1`$-d:HQQ sIb(85D%`HįUOPSIIaVB[X|SG醙ZW?+,5+%ڐԨYhfK(Ȉ[JABhʤJj}- R m'9>=x o(շ @;i#z=} T7y7a6&qaAGXr/$M֙F# T8 i03J*K/}]^ꏄ|!S$Qp j ],&.Sǃ>v!<kdaP_+ٌf**ob&R/&+B#r'ϋǣaI}װF ˣ5'?k#nxΌ /lhB=1^Uہz8^MAkU? X@^G!AOgqwC= 5Ĕąx9UO1~ ߚ;tw2{38ĮtD|^h\ަݧ 0aϏ6 oý7Id˴XSO=OhNw^[\m-/ſ-A T;ջ?-vü-ԸwYV(H{& GBƏ۵N/krn 9k:yKK:{wQ`NtMn8a%ϓo q!w{xVqލ:'Ȫ*rwzpf@z.]< uu_/Y/wCC=mB/$-Ӥu2}wC}=%Yq74{l2{?h{qRk:]; sޣ)퐃V[!7ke?Z;s3N$oN˃L^ZNa(죦Z_+]~f-m,;~N{4G7[2<]Qi -ɜ]̜aLz9g/m:Ttv@eo|]Lү|ڼxx*%c%ajBP T q~iiLQg!G-/p|`"ԃ) RJ8kwTʸ ^3mF/P˫dE_E/z:Q)նkdJZFW\Ev.DLA䄁K,kEHԪ&Nj]##$vlq6v{]泑}h}zP\+²yvBL``iك` Y( GzէYcrK&zyHHٜ+ VJG&EG"kNGGfi#׶JGf_D=* 4vU<*@.za0ьĴSB<*bQahbYhPʡ"8ULn?)*!{|HLR.!E!* mEfؿ,1?z+2]ʻ V^"_ D-!9WvSyYGF!39wLJ~2S@p,Ӑ`F1!8XCؠC1p>Lg )UB㞻\(/ ZA8/^XVP!y*Pu)Cid@]VZדu$Eˋp~RfSs.P'KYb~+&O&?.K&KN{}~36}XD PAp/FEyge7E%uLO|] }O%!F%+猋Rp\8{\TCWe0o~x{ׯ 7s3c0+jk"z+x>NWEJ2Gs:"H$2BBJ#ƔBpi8v; Y^=ڣ6m]{;L a<g޵RL]W}w{ W?Oaz s y\+ >q?{k\ѠaPH鈣`EiZűa8&iTH1$z'{.ԹMQtSM ZizxMtE,LTЁ2rƈ`Y̍YV϶'R?.@IƛroA \ *=f^4ӫW)e=RHVgpx33]]xX? ,fJ鮫;aԮWYFxqjV~k_GjJZ˛ ^[486ZmE~I_ʵ , ztpK;R!k)FZSi@%!@HYCj^[) 1CUVH.N9 FP6u33H8RGqh p k11@#,0 s! 5ķA!=C\B<X 2/,B RČF#bōWi=Csx\wT¸`9 ak&=ʶ ē1Ռ(OIl9\H =$L<5j]A{ j%EɚHv"J}BR$W&cY4\A,)T,h X D@@8\@a@*`>"B h,-Zʣ)aBT6JAƅNci. 
>Q Ԟ͊=o6BV)Yk'y՚wNlr(3k_6ïi~kp˪&Ɖ|^"PjmglR>fF_x)$:ص(-B0Ń2"P E(E+H T cc?lΗ~|->kedь -`=4N,Shkli|f^Ε{E>[^L<[^ 8//|y'΋l;lWEg cgˋ)Fgˋ)gˋ/g^L)>[^L)?[^LsX^L#6K]mXJ:]OBY hYҩl0Vߏ L旊ӭnüri~XșhMi,C?n@1D\ N=n%j_hڰ37T\yt([.ML'Dí$6L [r&eSܳQr1hb:&n5폻w4nmXșhMZ?=薋Aɽ6pS{F6,M4˦>Qp\ N=n&W{n݂ [r&eSYt0?薋Aɽ6*-C-7nmXșhM7?֞MңbtrMt;`~9-Eѭ 9sͲ)Ytcn41{l/ :maK-ѭ 9sͲX{E7Qp\ N=n+W~^ւf9gAjq S{6*n8Rkvz޵*yJ^5Vv-zؔB/ u}xmzFJ^S̻gUO-@1{6=]gф*t zuw hµV=A|r{ij]UOPzk̚"՘[8i>j]UO\Yu5ܪ'"NL]1 ӫ1]1 EOQWc>3$ uz5f&CNWc>3^=vew5注1sN]FZ&j] OL^ κsWcn$Wc՘:3Gu5ܪ'켷!kB*՘s>3\w5ܦ'H^YRͺsWcnAWcb91EYJN,uWcj-{՘V1w56=ADNƬ]1 l=YcV\Wcjymvsq2Z܇ (d=i1MՉM_H3a,"W~#Rl3.?hh/WMp놃/ p<, ͬLJpFaSR^nfʒo i Y }O%!ƿ%+Kڦr誗%Pvyꇷ~]ݸvˤJ@ tRa }tIQh8Gʱ aȕ JQ Bilp/ x@@0{jוzYSB}evLbN*Ix A;h1 d1.޷aϧ//103uyh~~hFM35qp/"La䀬jbjxo>MTs{ RͣAEF$ Ϭ`Z#p*ZޤPWJܥfzPM.4ۉxr[QPƐ"ӧ+o>IOcrfZwouJ!HTXo4L z!!#4O{bkqWc/uU>οfj,&ofE0+:9_ psG -ږzЮJ앶6ҨC0poog#AmٰTK{ۋU4 nIW#Z?~vMҝl͌Tk\f>믞ޟ^}<9_įs>q?ӫd[4fz5[m^v?;(gq<2 {~wпEɹ`7LWa}k P3E7΂Тa1@OwQ@"J5D:O85qu qA*CtakBԉB8&b.u*D|CѨŞ:T WI$yr> V0ǰԘi¸ Jz>1 O"h0FG,Ԡ`5D5 CֻV5ltBYX\6k񳴬=m֪FJ!Y=P y!Sc)Cn4䅫hNI<80"5Kлb'MpWB;ʡ y*ZS Fq>n1xR rTMۀȩbAs[UtKxN9jVFcr>ag;(^Ι >{GYRCb:B%JK(aHY]L܅&-SzI$j U t?M[h=;j KtZwY+T( )ڊO3vwLl?=M⟯R7.շ_`11]OA.Lڤv +烾:]V"c=~QA4(JO~l `K*!t!e;kDg4,.N|\*^ZZ +Tg[bT돼) -V-U(LHnHԷL͇N> 5_/U~I{1Qz)5uM=ȝsZbHu{f61lgYk:d_lxsOw*mnjȠB@7KOnKeJ8"i,$KŘ usTT'Zr$+tF$xx4!tM"I2 K.)~p؊2Rk' [#A "pf ZkX3X`frvU05Bc뺡R-s&19DZvC%_rn;O6 AL{in 7o"f9}zWo~5L9L.זhy eo|JzΛ#o͑VOo:Yg7GZ HUX2Λ#5n~;gIffNQ^&]HOL;:'lf)7x`ƻzkKw1҆3d6Egc'^P@ZlLgJ"].B(51Q {" CmٷW%`̿qߜU(ß7 //¿l?ǍkmJIr{xIRCTz8@:_yB*,0 `Z*6éd\S?bz;&Pω﹠,O'rYXD/fFHsOzKK.6+N@+|gEdO'q f@EfسsSR$|S9e r " D""`k2^噒9H"#R-N?WWfdk6{J37h2U$$Jve oH9{P/oC2>$cP7EE`[e8F%qqn`q@bŠp`9!3&/IWDAK-;W 9[7Ms[Vd4]~ۍ;O\ˏy˜eOKL\af2 a(*~ɝFygK?5Q" <Z*r@vOESc0B1k`q5HFo:AY$r^~0N]cs`;cxaDf1neq~"wE.MG'҇jbIY)Ȣd=aLAu@%GluJ#C[LѲA Y`rixeDVqH@Һ v@N))X s&T1hHˈF0QϞ%`Y+",~Ѝtͻ!gD,2䗈ٖJj}zȹ8K4wxba<ꐔp$(Pdt5lzXf<ar^V-+ƛ!]7.ZKW }fx fS5*D89my?*<$/wDB @dy!CěXQdt(/Dz-F+[a5c4#VzeKOCAW4ĿV# ǽ|a\[WǸw|6v`ǽo7>ƃ{r/ fH{ `ܢ9PY.M $e1ᨭׅSo7s@:w[T}?zg ֣½zN /gןlɰ%٢s{ ݆/:F*A(,_FbqD7UT*R/(jĀG12ω;rmjNĮH)QWRpS) JQ`v 0eLr4S4)`QI5tg!LudG9eƣS3e^.l8fOd^/@hK52yqm( PdDMR( G\^ 2.[H !sP)[%E9l[_HzƱ2EX&`+x/E@,冁Ƒ:J=व14%`KBT梊/?T\+FUU$I)ǔ#$*(jVW_n#叏_mcN>?gqKH֕sX \WhW bY<`@ij 0(ޛ2N(ko>]\^qΝ:U} 2Y$Ń;  DdJa\CD,T[OETVdKgٚ{3HUb PQ!eLЄV{q3WtkwNLRP$q`G$2/"AvZmXK[gf|pL9dO[<F"@|U"X+CwwUURW9 3zч;lbC8e3~YW7Mϝ,-%#d$LgQQ~rY)ƤT%KqTIN.uBwD6Di rooc_o NuSo:SV'}>&h_!DE 10tF:"ݮ(j ~wb5#,7-0KaLtr9^V}S/Sbݹ]q5YSQ0RJk؁{+s;s=5js^znT"QMLL&31қi=ĥ[1#DYQjm\g&PAR *%q* [%9^NJRGN /HE@#8Vq=ʔAAK;NbѮNϯ;+$ a(HV_qC!їu"qp+U2F)AXeLp_&%+Iwo%.pSءKl9Dy䲏GPIȳ9)կ&y+wHnAGkri Ѭ A5D:! ew3:_C()PL1F~81C*a&g0Ki85XcQzTTDf@PTV!V3gRl4lLH})vEW?{Wܶ Kd63"ZmvW9ʖ'5R5 kmEᆵ<dL\"}_~Gɀ /&cZ?mr陯VfzٌFCx[C.'7>@b;Ƴ5Ml$24hoƫβiT'C ,l;nPLB JtF'\yhOSU5@]FR͍ƫv7^MQ+r vj.[n! EXiVb4EFmKscՊ*qcF,9%\JYD,ōү+v&9MNđ/n$)f!ЬT/IM(*FuPIbe2:6+HF_υӿKrUyDe]ʺ2xh:&:vFY F}e5~3Mq<#,͝&Z4[aُmQs ~ں4DfQVUh@靶{np%5ߺ~(Qf8$ڦN5 ;!V61o>:Qe+88w;+ U~K:PгK6xҔTvyVBk*40vO9ѭ$df{#(x+-(V5GɓQ,EJGX M8ݲkϻ9 D*7–6iDQdoVClu ;~n! *0ھ=U0ISZngT1M% ֺ41Ѵ0 p#0WL;! 
(/u֣ۜR~C̫Bg(nQ gJC1@_F;岴[HBDΙ8lDR]s:µ}(KzfnRV(:KFfyA~6y*wN^޷.5YH<莠IhupюU'$&X941Dp!$S-KllRU.鼢0ܹb[T1g:?Ls/c<F [c??8xϯߜxv#ջחn^\_o~}˫ȵguw.;Qg҇N6-PFGѤ e׷\5^: Cͷ;wއ'AHc?[w"P^[`HIڊ^tMΓ}RQEOBC{b>tė};yxdgQo1HW_00ށ2L;ݴFXߊf xp|I-dB?(6-Aȵρ߇Ń»?___vYw֙nvGj(p}h߃^R~&(B]MuL%bmϯaꏲ+0 I@v~7g|hfpy=47ӱu QmR-\{o.\S!veߚWn޷$aȻyFnpmW0i۷f^ ;A✓^F_/5yWލzL{~~2s5$V*s_M]^a4po^k%1χ0};m畄ጯg ZBAoy0 T9:~9lH x!@Б*+N/(ެw2?8l7v{>v1Sr-AE_ P (>@Q䝍y0xz<49o:\dY@v UYJl!{RR1 E$,uQh`LF\dNXXY0#ؕ`k19xg#̙Jh^TO6DeC+j^|ɺ_Clzu,<ҹß0ׄ{jc ^A?M_W~W,AC J:{^rg}A3 jOcF뎬sHhVR_:uؑ 3-ד I`nV>W>ou5`bd!Ӽ2PQkysujbL#4L1o M+2iMGf/H Du e`:t+Jľ(;W7JA9"uIšJdB'"ƯKk8QRp#*R#FʒmebaгZ :6xQw̽oL9 ":IU1D1MI)M#A;B`_R =y C:c"N]PZyqX/\V%N:crkj_XZM~1 t’LJ[*ecR3ÍWb<B/x=|qW1m)h#ARQAs0h,! H L9zki5-.7ƀ'$KZ2f~C12AΞ.}DY%7!t G" (@/|)s;8ovYZي٣^­m?xv;b.^͐93݊;mQW4]pQg'9SOĕ}|u=\1BcvTnn/{a(cՌ\̚Eojc`6/b29e =m//f{7c-|ǁCCg⯛ʆN O Vm6ȃ6Wz|,{vN+y\Qq`1xftmQvj]9m`g;3x"j2{!6G(Up07i0LSJ{F>B){tΑ[d[8Sd;%wb9(dJͰ1Pp׊`@v`Z[]q )\\ J-(5|~uC(m {"D7Im6Fw6)d&N 1P& N BQ0$T))N"T#=P$VDp)|:P`(#dIEgT8G$fY@c:{י͡g1 Tͪeg,s@]7hוu.{{ $M&"1"ISNDuLB' {+(-jm&$JgH fX`hT$F`8Z$VFK4+05lX-eV47sM].pQ!6:dsxK1ܴ@_?Q! ֌W;Sʗ˲4*x|.^aSɜ7T<0Oape U|:p:*x} tڰe>r$ ' ' ' ND!SH" b9!!"Mb ENJ"i2.Fw\yjkRvA oyMFjInMt4vgԝH*mp{wމ 2UNi,\AV:ݠ_60UpMInio>*qӴ*2ne&n>ysÛOA' N|Ef}0 O^CC˛Ym X4#FBIhD`G4P!Acb""PWT[򰱗] N43XX0Wkr ܭuyм'_=Kcp5VgҞ܂.qhC72B4vXʛʑiGEbۋ:ݻ~߻,_,BySHB[DzAڵA,{c[z7,ZbTl&d`BD)n"*)*% Yi.Hc\1=%l8V?*C&o/]]׏G'?zx8;Z|lAG/HwGqqUvopA&A3&ή]\_o.W? ٭_@@c܍+{;WCS߉:х䢶۟o0K}x}'i l-:f4i[MjߝNʦ˥/N~ gS#ȸUnw[}u[P?h}4_۸v?^iKen ISwq~i0Y Yki<(-'[eɯ?GƯg-uC 06(18" $Ą2T)c3CfA&ZD3ɪ@>90s~v}y~zZJ9? -&i}%+4$k-Ō0cx,4, --45I1\󣫼DMo6;Aԛ!$D PY0 \Sp92VaΣK; EH1 'IlD qur~6lpOvGorK `d%|T܅/vM:ՌO{&pk`vo(:u{9r<'NVzumWx D,m|l?'\vԟp.EՌk'Yj{ZuվpdLlW1յOOaxm~k?_O;oS ڇwڏ!$HuڗNe*w-:ÇVz/2Ê}y?u;;_)2(v˶ (:w}9~E.S8 ,N.1û.tW}t(98ⵂwܯu)3C?N:'1~piJ6^xn'd)+TtMԪ_#~XkF7kAL/i`DW# b*};f+wwp56$uOmj(k :gQW #nZ/: 1O5N4WP #JgB*eBϓH?Xj+8Vޠt-TgopΓa3$I.]0>|؅kK*f|˟Rsc yzO(:zz*8KSy3-(Bt:cA1z(oF虙҈RuƒY;Lhz|riU)2GVRXݙt9f+^dJĒ)w$?jޓ5F)FIV'4 2E$`뉠20^ҧZ:YmTmox7A`L!e]0 L]$b*m6G3dM[o:Ć Rvfȁ UN边7yЅ0b:!c[݉ma[kpg7 {\Ln[A=gx͔*bZl[%F՛ɲſJȾ}ݺ!{lLJ%zwΗMwǕN2+rV1$EN~V{޼xiwak3ܵZm-˭?v\F0\PK/>2̪c$6uvu\7V aUF#׊,Ne > o2θ|?ePqud٨f+#hFF;U\'V>h4ւ kƥ4 ""6!qm/S %ں1WË2vvJM9;Q )&̺xX&(#p!͏:1W!FzN9g|( #6Q9P.l&4Xe4XB27|`@kpPfODS;sї6s6`'bwJHVNVNVNVYY͞y_Ś%M,3ҥ95: Clֈ JdDqSVT蔾)ԼJ8#PO'ʣrWKT3+⥌0"  CM#ay)RxH50;,tbBfח'U\4]E0͠DT'Lq WJ7yeP-d3YF2+{}ܕMzV$Lx2ot,xq$(|_iT>BYP'l%vIEFM‚N࠰zJI𔾡hek&A l/"),*)q -B@!,ֳYk=9[0]rT9T,J ..EK[ԍsbHDtRPa0u% Fld$,< Cp0յx0v+q[iӡ᣼A;9!@ro|2yq/l|;eL#OlfTdfr lp(^K,[&.%s|ɓ-Ɣ%H1C:1FơeBHDliBЫa(N3M1^2^/HEx)1҅2`o ͝11tOgLB)_3l&1$`%1 TkYf$DŠ;HdqM$c_X=v']q:2ȒyHY$J"Lw۔xqvq. QT!V0L$(e:60R0SAa(iLq x * q@Ju<*f( ϐ!'F`Pѐ $RT3dC8$:BB D2SᝯCJ1):jlKS)1y5 Drhlչ^?Lh̚%\DO*uf3'one~Gi_V렣FsmG=AEd `HK6 8$w-nPipKVzA zP||tordy5h:-?EdBW?%8@DIP95>qm RYzaB6Z%oݨ_ };@6>sEig_ )^-u@A`emL/'әm٭Yt65 5_}✫bsj30Pb~M_m/GX/7Va> !-WGy 3/wPJGZ͡VcV37֫>,KҜpLTs:יn#|Sx6j}j.uXsӵ솿(\4(,],7Fm`c<ܑsXq5$ >d8uCۻB?Hj!;:!qeh=8\(Ec/ILgDvK!36qR~['܌cQiQ*qiJ9>ԍ9N, x<>`!ִL&z=gS8`?Z=/s -Li )R4.q9ĩݴf8A{:M@v  Te:74&)4 ./WJPgz S@"i(8q*QEdLB (JGTFD&)Hyz@M<_үqInZj5͡FQG Y; XIH5@$2sED @$3FHeZk}F$FS?`4,۟\1YOa&݌Uh/zCwWf%]C(Z:**Eջ-pϯnvHIKV4ZTn3ѣ|OքG]w? cӯY~<^Icc ]GA<΂Vg4R>k5ۨcPu6 UZY:X/c``^`0sm7>^ AJHϚ|}k Kksǂ\`=p-±^WOY>ρ>/t}p)#B@_U|(# e3,k8y4{P=.(X Ê^PQЃ"H>B$TOJ>V䨑"N0>kKlI5J-[RB 8hO3fP5T9gYyǟ+9zBރ^1q )xo3+-3k4)^i V)ZOb[=/廂I,QGD*!I$)X"4zn+0I,*ߟĢ@_usDj@R2`Iq_d _k-2҆^`,>o=I.j=a9QX8ILd TFBO!L/Mj槆Qφu3V$Tz|!Ƴ;iS80VWJ;O>oFlQz=ktxZ.u)/ AŃMFḷzOd;M/:⚅rnK2/LS= %_@.TBy5&2gV]$dǨ2Ed(sm qA#A^.[."F Ն,$Ů.YY6 I&UmFC(vJ.i%pi*}NLC$)c>,bhs$H#$(0"?+D* (H _$(teȅI iM! 
C@UL%`06u|%mL%W^R#aFce7>$kt6Z|ݍD5; r#[-r(U$xH $ 8!XqS BfN;gҮ{[Omԇ(c u,ܝm::Ɠq(D⩤Ѐ0Cba ;Jw,LB`s@ /nG%*,~(eשI ̳X?Uor\% =)O ^#=z)jhGn]ϰw? cӯY.U;i Օb=o:gS?!CLŋW5L[|Ff,{ gsӫ/{kN{J7Zw+0ԯ4V K5I Heë NJ@u 7Gv^ϥ[ hus;"K^J:to#05$y 4΀@S hFBM./A'q:s6'x0%m9M[dSvw˅寽{RqNw+|+V{ad;oޏIM˿Q{8^'h;}&bKfqOuCǘ[iAl= :PPY,,7p* `TϐPjScG2LkR,fܷk`& %{'(y#BwN½wv]{$d;`A#%G9¦ӎsyCzȚc85Nt>m Oj\0!2\vpyښ]2NVN*j;Q?3ٹ1KXVtAKlvsBr.iH:Vr *m6~n^SN֏Wdd`ehZq :Ȫq ֐tSYf[xDCƦv?BAI3Pu SVЛݗtc%8A@LtHLWXdθDi>bER}-nt7Vu;r*UH ^ rf9NU=N?MWsa)uXqxW_E2&Lb" bP-Hh$pqF)˜FJQ9qQ6ݘGPG J)L;1 kmRȲˆdž"ꃧv۟Coet,_bЇbM9x֗ѫ6 6pV6__&͘/'Xoޅ~&If-T-g+~]N&506 5bӽ׻9CC&<{߼H|+ {sd @^[ɇ]c|7 Șr(ୄDzx\Lg*Q4{3Y(5]3lbߝ1aC5"~B˃^/x'E`֖=6:$+qtx4L@D˹\ɇn<_1;pzdg\$ͯo)ir m"F~p [ڪ~oUMsMD*m4%h+ͧj6e-L䥷7L3I 4?^v[ Y6LPS2"~=kL4^ۄjUM)&!7G'3E&}Vީ<("f} 59h$ ݩj|Z5xh :1iJ+[aI}9`O[m7O5I:ǯMV hZa^śmFۿ\GD&_-VN`Z<7vm8gޓM6Y%LZ3ٮLSg*[-DJ{B΂\F>Gλ݃Yރc e>F锹mϝ`#G躝$WX_}⚻aM ^;z/,{cyKxRѯkSX%ϸJ"Kύw9<$uY`pH1<{.+H^e9 ,+Pݔ{mgxѺslOz"b"+RKe*}Ukɗub\QQU2a UڗύEt^I8·g F邒XVO7u]G"R Gԍ,Q%tSnjv[_IcTDZϵlDn&΁#$4u3pQduqʉ,IXx]|*!0"j9_(NIjfD`6^GIyV7ғYS{=Q/v:""XBG棛1퍎ͦSk"C5Q$SQ5O?ˣ()S@f+0Z, |QNBV41R,ec^@a^hv{s .͋ыD F޸YoD61!owvrac=9L!t \c\w]dZIȎQ!ub3v5_0 @Lf'ٓ+cՋ8J 3:q'k6ޕϖoP%S)UfZ 'na󱮶U;6BqƵ8)`)R1@HU! C)_) mwWvS+m4ząplmnrc9owҞyhZ%4KbkH*p6`Zu9j#8oNp@PgtǦS є2Z!fc &{ p~%}xP njSĥ+J<(+ō.{@4  $ QJjy2iRL@PА#e $>hB P[-fZڒ3m lƧ?Uh@"1?`@ eBGL(=BLd$<ӍC*7~=֪>i7f` ؿ&V4 LcW4ykp@]5@3 {BI+<_pS:\ڥ&硛y B}IDM(i&)a;2&[pf ߉luKxKdd )im>t Y( W,-d,cͮ`u$(He6׋ȷ[| S{y Ȫ5{UZ>!߂'\T[pS 1Ml^l%b䉭6h+1 V-H-cm#XgZR!V~JQS7p@m:${{}iy3 3 7<욐a8^>&`KQ7^NeAw?v~| P̕'pp3LO=d1J' .c- ٥kg5یctQ)Q.;e͇Ze5uwY)-m29}/ Oo J %,gׂUw'tR?HBrR=/$mMRƼJF6f_>'RJYPV&VB%()ֺVM)婭~X\@3|kXPu2FYhb?_͉*ڹT.=Γ@8gu08itt0am(˹'""ys]AGOsCAzH!\u{"KuP!Uy{ !S͖i .Zsaen_w0<͚0=[7傻_c%ӫ~=X"L3Y׸X'&46`> 2 {NtH.Ƞ7OKR`\`,H9VctSdžmHmXxpULXUJXm_fK/ެΧogkoWnҨg3ƮHx L7mWPn@n`O_Nh桍XxӱȏQWx!IÚd3y k5b:ɑa+Rtu.Yb=u@>uvtڊo3VƅWADBW:"Mʚ{VEkkR 0㔢03b%@i\ yq! =!Bh|2H2Pc X=yNJHŎ[ (s^8/:ҪBc[ι~;) ыjJZGD1d@J?BƸA-"J@Ftmb%2ru]p$1jyLS4lΘ >ɷlt/X&Ț|pla><߼~w^1uvGt=0z_W/!&M]8gg"tٯĔff P7ӽ׻dCX}")w@JmD<-ȖIY˞L'MfEI+dXyԋՓAd'ouv$ۋiB(;%mtWMb{b.F\hL̉LEۛj$ݼhoam/y^fqDHracU=pO;&_qlhg]U< 1j6[!:[8(_A ĔU[7U/cĊ~prr-CO<\?T3(h -׸njG0-y}E/flnj{*eHmgvj>Lmq I5Za: :hSnb@)b5;8G`fq@ PZ+XlNmfǽJ3ܪ]nma@JhɃ3fL5ڛsCOBvuqaɋ8ݞ"+E Kю(@WErrvޑ KЮ* l0\T>{aTթ]ٷ[ruWE#}n˱gH^96 vɱGWsʅDhH o661کpѫCBU18kl5ƥB7`6%65G'hP'/U,)ZrqKVNJק " j~/F˖ ._S1h.3E ._R!^$ 5iu HfϤZK>մu(NhO7AᆴJ$2nfŌJ~1R{ڡI֍0MnƒHDkY9`vH}B~ݶ4I!*ֈ`m1 ?I`&0@6-%MR"+s sq!Ђ &PprEG9D"n!·#CA)LI>1v\PoV]ü Gc:71/Xo|:GN&-qi}2*xD$=u9MBU1`Ivb2s. ˧ Z\Br3*赶rQ_+F!MCЊn 6 zj%ħu5W} Xe7vu+b $m`^瘦kxZ 8& i#Ɋ}K÷t.O.J xE{ItzyFƧec?ϴJ)Jwϓw>=ϣIց%_(/MI'Un`Yp5 %SHEޯZmxͦseƼ#:C@^%$ ˖Yꃽfl3ѹF%9-dhL3IkQz!xU4ìRppѽ`CeI.BD9Ե+R08/'QNn@*l:ZWN%vS M,#+Ʌ~U?(aw/8ĉBO?,洢@R~R3RZ {E {U]U]|%,d/oeOVaD/q ` vZY/c,2? 
3\Am<32uiD`#_#x 62@&'U2WtB:vu2&8X|Py\{zN_i5= ǴH5-2 ʹfNdk8h^i)M՟/ꮥ`Vn; 0Q2KBJS8Ž9oVg"&NQ(pQy[`1Re3#S 63pB&<0$Q[<@U3K-gMtSW1( '+uM%ȽJUL: s΂e2mx,wRsmZON@T lṼ<|u=E-ciOE$X.j:1: YBk=w j`.( FkGg'f ~1j|{qCB"gޟbK|sw} K;\~e~gM7Inf TşL-9%sg_]}?=[D*ז#ϴILoiITIo|3SHRA&T W2dm<&MW7hn PnfW8=y/`烯Ƹfl6I;]D;n~{7v|~{ejDzﮂ_xv {='&_P^wCkd8jhrEeM=4T~J|X:bE*Ÿ**?LJ"|.k7lKb?5rbə>XB\ kaa~{:yn󕊜 /S~\'V1rۻņ\KZ:g#$Vr*ט߼jbKDy>$VEѱϸo-08'vI <68{άSODVBRTg?dmPŮ˨!_[G?̃j^E)m&n F.p;v؈RyOxuoإDrh衽[0˿b评EM ,Jjr<?\ȜƸ-\UTiRh8To7?>?U&Z؇aok {!hkmnA8oF #]:خw9sguD+xɢԱ@ hJr*S!J0vΰ F_ B砩5l "''FNMAYb%Z4F:ҒMUc?Z؏2"/R~tY& 1ˆױ>L* RJJEoiel,pT((j&li돵[O|j,@W;;z!FZsd5ɦ%DI ,Be}ʀbpJ#j-`4DFu’F"]mhcy6&i;i q&zA"RT |Gpb4eS63*H{,7f#j`5p%D-c_Y7i2v:t>2&=S@,%A6IC:9 y5psXy&S~ C!5K吡kyո fg7%9]Yfcau7ETa/h^&ѼLyD)t<:ϥ&N3bkE%8Ar\\~Hf9|q:Q ɠtAwӔ귡 Ӽ~`~/u<}-͆-؁|;y-u8C[eqGo2a4#Cix[.Ac5*$Q<(kͣP'E%@Sˬ Oܘ@ {V-kb$+7`0Dy.zqˤ޳N"ʈ7JCjE"Dvm30*;U73 6DSں\UH: Njvs|ҋtq `?Of}ea"r~߇zD< '޵_Lzl [q(+F= vB|1*]Waq٨z)+ UKɀH#๋F %}g˜R2ʓ@.9=&Ɯ"|ܯYx|,+zbYI<$$JWwT<[Ro:W:OdtԮa]޹N@+3Cef]z9ݾohbD2 ߳HkE%'6kÏuk]ۇWܚu|ʬrLmuʚM۟jT{R:ŶՆ2~˙zj[ݶH􂤊ю;@OuaGȬjDp_u8H86ڠN {i@kg* \$VcT)1=SOE|qAfx]TgA2Ca@`?9_gO!|l qr,B>X6H^@+̀ ^g9ɺLύ$Ff[̂ 3[/Ůn(r;pτ8>78V VV^]ӎp*`QY? ^JCuU˹_.jj)E|}< I.w7t=ZxzXPZ_22sɠ9+nٴ wl-=fe(B(HCC<2|6>Q&䞭TG ὤ>q II"b}/I519pՄZF&PY $b[U+zWqCĐ6D!TJ(4O5-0(;z ɂjSzjSfd ŗȀ4չզ@$%(]׆<( ɹ\疛JTdZ%M6&MrSQc6Ehz}ALө3Ἷ܋,O $D˽~ Z={?/ZɣlY~܋o[ͅ>ݹ&Dю]24א=ERa2d͂0?0v?Hh3h[n{"䒋Sط BV-4/)i[K_d' ,$i\PI剭Iq IeSy@~Ixx18@4P? K}0@ > muyӱC#6C: IM6ȋy [X҈<KobbIO, ,FX҉ K]%=Oa KW$Q|˲Ĉ㷒P̜J/q/`n>m̈:9gFD+Rq (؉[EӀC5㛋2%К6TF$ZLv{A~uMf"o}]?Ղ4 ÍZBy76 {b辙Q}Z Z>Stq{AN>5֥o..=%Q7s]{uCCrSί86n1+ڭ]7vavACr=)ˋL3(k֝X8jyۍYO'GP@ } cŢCS,!PDHfBO+$ !B$"P6Gbe9\i3k WWZ,_MYgo[ xK2NJjy 4wEpuv(l%[peq,͖űj~tzh!dTϞ̖v }­&C_40}%y tz\Mb7`ӀCW*Z{.G kXk4.r$.[t)G!Qap2Ksc')|OCY0LB],t@3spr=;NC,c]/f^ˠsNnR??b=يӐSX z­ /xG!I-Vu{MW3=:IRqf-~UpG>318D$/8+ pѕ{&r\ ɨvYĞWrUC(ו"PT[r:戹<܊|WBaU:>z%$`q=3HqF+! څu@ťW6i50D ܆\wϾ;Ԙ ~+X.}萘8&,:bY@-~]M^V:I%p3aV>5Q@u^lziYwRIwW#.Ev3x>LI卽 Z=I()/W2(0Nw_Ԋ],2ii  {/Y9u|,@LƜ:m 8p3g\t@EyCJY@3Y8+\p{kvKK`2j\h 4F׵8*E'NhRK,;O%ul-VVR91lޅ4C/?|L:g—M>KT5Ie-'Z@-W*?UɨܓQ'rOY|Cφ "#0$ >HDaq bS)HSz?<&߂?nB+QQ< bPBTH)%2\Lθ,RwO~S+Υ;PJP{Lw;઎#%ppD]+pbX T<B?"RPk$P@M))HO씆 A2T 1 v =2)ۓIfz|} QhشuRKxw8R6{@ޛ v2J>qgFu~7@!ìxhSX6T|қH=q p5$Lmli<-Q׻o6B#=LƞAh(~)s,#,G^%+CMKJH-JK$5+Nǀke{暒xN^ᆳۛv#ק[wp(M4e߷(Ofω`?lnIڻAvf=B'0o݀AILYiOJɎ$P- ƍ=v-y%T3.Hj5N3[^I)lZnUwt[}N@ 3f璝Ds3f Ǒzwӧ]\ wk>vr]m+,rpSo)bոPˑJla8W4ر(.bdLTVLXBWԵ>Δ\=j޸.S$]"uVqKH 4XM[4ڡ+KڨoN[66i+-oOmy׷}@\IuLѼ9洮vi48Nq`)' u`-Mc演sBr[7jܢR/33ɜYfH´*>O;1clj4z3)$&H:p)N& L8$k6^p I#xepZNLMT;CF;gΠ!X*Ve|yF5Drit %FhQq$`գo_{s92WڗίBݙ2Գń+^gufH6g]BBL%,Y xOH(I&P)+\j)%ׇC$|ܬ60"g`>bmB ^$PD?w[$i ,L4W\\jr_=hBu&[7-ZبNwnmγefnsYZ:4+W} aQroMe(A|JCD~]8_?e2Y&XJie0ˑTg X 2d#ӫzMOXUomۑ? 
Z'#}4VbHig,3fv$٣fRCOT1xƉr&aaopt$}E,0cY7ϓYO_Eb(Dyq n+TD]-7GD1.+ )D5U #!˻!QX!LFEdt%V+AՂA豭ublx蔿b?\wq.B.c"]'Q=5x;Nd!W P?!rD)` }P**Hi}>#4c,(Rq#RJc/8Y*L*)JϫSYcv)JA W; +kq5>8 @\TƯ0Fvyܓ V7GiYr>CbC2bg\d~H!4pJI>9ApmND!*\*H5%pt^s2ΒEzdrfL÷74z67h$]*zOen=vl|B%8~UZĆo}~|\mbZS8Lgg&r>blma=F<ْ!&oz 8 x_Uė0)Dso[l5L;#Z/wcd{қ OWj91E~G9|:'.vN~+o7˹._jx2ݔuߍXջ=/gΤ[/7[՝4WzZoon #KqŤ[soE2NFt?ZM&La0*,6Msg}JI2X;!yS\bp4 E$Pca!8ÕaPd&0 &V\vg2 o%A^)<"<+—YbzKRP>H:M'-Mi/j!.13wWz:8u.,NIv s"/)!@ Q DyIf $ZG-w^7Sd˓=wx21nwc =^β9:mMk7f0==͛k]nĞ)m*+GOI\PK.& 0ؖD(>!*bLB ffYJF ꙗ "(EEY,H !0 eS-R RL{ʆ,Dt喣ѴaJZ)Zw5qFdTbXݕ7L}6apU;ss&##: ..wzk1m%[E%-̒m\6$D4iěj$ܱ·A8ѬKIȱxa96," ?zG/>̾LqjOqRsӷAkk#__?>MiYAo8ɟugJzȑ_t I:4jܰBUz ZCT%̅)wv wS> FaNbZܹ?tw!A&\]SX*k'3][PQIkFKηWTs`3)- 0TL!DƺM'D^ o 9 3]ZsѵS.-FT̨fmMP3zM[p`R)ܰ`(i=dٳ 3)0a[̘'Ϸi| }^Bu#zRMCiakXۡ|w} w=F:rՏP4p ;j i1S$)S.!^5zJ$*Zt;7v OVƔ5<7:j`)¥7+N ] A ;1+糢^oY-!C1J}5j+E.bt@w=\/6[0\?bT+r(Ђ;>Rv9`yG&Yކr-GU˶=uXŊ|K8v8dv4czj*aZ> H!:ژ`5@׏7TgX韜8yY 2Sh7Y4;VPysz:)KBuSmJa$HGShqv3*dB4ɏ J$#U0y3sQBnQ߸a6ڶ$˭Ç$tūŬurp)m7]\Ne( ^WzS v-sIa4>'lIJ)5jUيLF$κM:h5jvG~v.D( ? q ~yS<gިjdOΑ5-kMéscֽBjT[6UwCMmu%wun"NuQ.sŞnguK3l{fR n){/0 ڰY7\Zh KT֭BENs(OZ {"DHH$g+Bպ#B8Ȝi-g٧Zr;˴PRG9x]@2 E1!2]rЅoA}zK-肈Rw钒.<;׮ ]6%;uYk&S;En]y]H/}2+1>4a%6EJrW}WkjٳE&q_?[o_mYx-`l&۬AJ{z$?3rG!f.^)8j_"?=iG?Hpb\̕UGF`qbx5[+ђRpp55nB"lq:-j=>}zpJ-7 )f}hoד(lVSǛNPHǝw81;eFYI%n,١@S^@p_4 5[ 9.nm% "[ tN ;iA[dU(mNh%ر3S}Q7SM0}U}JTm]p`cї|1@e.zcaT7~fj]3~ޫj]3}I£ϐcLR@y:ѵ.FJ !TSj^ oHA/hBM)0;"*6jkxSC [C-"XIS?:=ZUrGj^:]]@-ve^|Cz/ƚi/#J+T04e7"#j+^Nd1(OR``ܾ̼ Uk4[7TgLJDeK^i;d/953ZY4~sQsTH)z=yK\҂MbZRdT&G2 0q#y㖼 GH^4 [bnFi)k%r~sb[:~9?xYwDED~HexlM?|!u :]txd>ZR"J$vY ߴpxQĕT4?qГ,BȡZ9,qQ K}XQban]^Y,ӷi:+jWTiԒn?9.:[bӨ`eXSA,G^jrcsOn_$"ſ:}tLi?9Xg4. RkZsS AFslP+V|ǽQ8po(aisn#:N[Jw[s@[ZoLdWomJm]U/])DSÃN;{V1¸wyxĩacX%PLP#&cl!wP@]2풻P*ڈshw;+xSο:NZw!7k$'NB7PgսgOtA]gyU;]$} p~w➗ ;FEwu )z7ڝX/15|XM.HeDP]UAuUlWJ]]`XZ'D2 "a5e$"{#F€ I4Ɛ{@1Ei{n3pqe=>L#o&v[o<%8[7Lw{J|nS˗C7AS_= q8D>e@I^@g:&t1o%tl'Ɋu~pBN zzXqZ&EAw3{N9U>u%n|C i  ,BJHbB3!dp̏0ܘ: @)`&{AktQ'H|GUN(qI(ڷ0 R"~1,Qq8@o`aK#t :]'.1o/?>HX? 
==o X'񰊒_}ƛ-}:>({9Zo,|QvL|f7 cb|kdS,F&UӽDfb&{zpLPΙtk& YS6\I9!P;I.: ibiC{;)Jv-njJAVr}4m$]N7qJc_k#`g #a+ѧ-M0pxDfƀԹןJЉt<Hl%М;6.%󫽙.2/]_t!~L1Pb2 Omj'`1Uetz r"Lf6BGA= :9tOgm5CWҤ0/mVjrB-ӼPdޡjtdрR2vUk5פ weAtZ`PVO05䉫䦏t1ny@RF!e=97NUS=N`EN.O:Ijpj4!4delXCSɒSH ΀ƒhixtu fi ')$YgڶTW;0d@1kdd ,!* h>כilV3.QT[L 7nƥp}D@W8\/60\?zT+rHL{>2hM= GZnqZ`B9qb QsPih.?>֭Q[vhol9iUlnt..ņ ۗD5 ԋh=oFѲ?伊y/.!b~/h}+M=jF||9RKz=yϔIl%Rh1-M: e 5AG/i FRBS4fT5DŽV#ύ"J$#U8ue2%Bғp<3E}vZnu)pX [: 5?_>Jhs*қ{hQ xUWNʈL(⍁ޟx̖ѕ&̻!@ SP_QW*T5yz@o03h7*d@h\Ұ:s"5Zq~`ؕh#5V ފ+ܿ mY\ʍuv%~W`h{R!"d1j&+w=QD~ukTzu/B :߭13k" JV"9.LǯA4u\VtR!ڵhkMV}!eUMZ<9{., 4A$1tԼ2Cd1-+WU K\ds2Ü9%O f;]:'˧(Ɍ'H XΠ˔(đ4N5} #GM;_ pr3)=\ӥoA(N k`ېb].Y*_˺sA:a he ꄙ.W #3+d)ϞwcF(EBN!!P,P@YKc#r#У8>ϸ%| FdIƺƿv?8x{d>YE;,u5 +4NˢaJ@D|z%|J‣@Ɛc(L0HykyX(Bq_s鴈^i<'DQ 9a߹FJ"X` !8|Ā>DC,F8 H 1-PKN&b|%-ϗӛ]} &GnPZ\6&Z@('1`|*J~m2l帏bf@zM-$3󙩷s$>u;Z4'\y!lbȆ{zg`LPNdtXꃱ0E16&Z)Jte#hoJQRb>_:.zsa\/ |67bitV{ĦE`MދկuM[|[Cfsp8c5 TE6]+v)Yfש6.48+=,SS S#G sVʰD&>o4 O-4"Dy20^Q@Ӗu{/YQpuvi!׃z}VKm+Rc!'u(&BR S0)  zSjNln`EN gVkS.K={&ɆglO{8h6aO .U^hL9'zMGy[ ;3g.VgFsfá.o![ [Ovz7_qK6t#t.,78fmj91ZHU4v &jwPZcSz<>Q2^=L?V$6B5fA.Gxz#\;1-oGo {Ag̴&a.r]H ŸP ;~go:D9&LDA't0\[Ygm^-R<VZVaZwr[Y Ӵ}j]iGZE#36DeQSͣH/һo9QwBJw/:Xg `S}{̄6Mm mj ^ajxϣr$}/~1*Ha,t6S17!2PѰHb$ڎJGx-ѤU` GzУAj;bp ǰؘ`.ǹS6 vF:17A0,}f;jix5EÀS])Gq0%9LxeAL\Qˎ-go[ AD($g4Gqavjl1߭ Hf*xf@ Lؠ M@;60"@f\r?qKueouk.B;8z0Na};'3 Ya=>SDݒ-cnIy&4H'|4|ztwvK zH|%( \6 #zpA/x~Tje{Qo<,Tb#r֏MWop껭?I+~˻3 \6s^KUNz/"͙Yw/}KWNn1^sUFLJ(>|ov8Ms-vʮ uGѿ MlRZMR08 ) ]D୮p;hsKF7 /&su2" Ij|m%/ 8 9s6 DۼyOlφq+TIꉶO7q_Cn(RU+]< ).^b{H;MUb/0zZʗ@Rtw/ǜIDO+)IhbkNZ_FbE8cl)n $ R.BXq?0Xĩa6h;j)8D`G8SBM4 Qz(\W\H( hh֥,Ԑu1ѸR=>_Wd &R=X=o)Q]=`hdT%ĉ,27ٚF0]fЊO:6} KUn$_w&B(O}E#A/7 {h!~^Slԑ@ ^25o"DQ}B* j$蜕Nj@KQz<_bozy^:<~/U}{տlḽL!pŸ{*X9[B!p@]~`х5.eB" hCn;eYU)FҹUP(X j H 7}*^[rB*K/TNN(]9x%* 1zyp. 3$WO(D,@`=9P=l _AC "XKSTw<4`qY-YXGODV[]#(gwTh8M2!ف@8U9X*NN 6W*8–*-n4x$ &: ?y5T8rb=pZDt1Әq#;3m1 mPRjnLn׋Y 7ś7rRIaF˰ ˦QʃtyV?' yGO\}Lכb8ygBM|`` c8f"<@VgLJ{RO,W-/o?+= Lᓞ'kRIJq5Ι.#!$Jw!`d<Î70J!&K #dcbBt@n#XZ9 d3+bf7i@SerYO~C@k$[ h4aHF @p _\!^1oO*G:D!* B\ ShHJPCԚȫ!dh:'3MKo(LVtqp,~.gǑ|䑍l` )ϣ>7zvU޿lHir+%W3ju63o0zg SSiOw:mYq4T3MG=u{L% ޤ?xre3qxAI]w\ 03c@_?8:鿘Gt?Zד(ʠg-ZY+zSÙV~KEm f޾_=@8;羿L kN5}2{ «h{Eο3eODqH p}#$ju7WQJniѪ7wo&/Jx{w?J1~៯ᄏ{.>w]rE}i^I>x~X8Qy{uzCeLCewuйo4QgC4 3HwNCn.>}w{wR-TRѤvF,|Ok}nF Kv1 @U൵)WyT΅y))!)^Wk̐҈/a4ҤR8F>?yZz T39j8Kۻpq;7#_7O{[|~|!/v<: ~SIlWQdj/rX۳/S?euzS ^җy6f=:l z~Ui5/Ś?u >z*˛.㼧e]ɂ$~.@m\W#es'sfEnBE/kS G_ʟJ ^{6fkx.Ҥٻ4܊Q]_A~G]16!''w9+GtyW䏛gyk?&Wu_C81/ӄ'ӟ ܢr2+? Dd9Q/[~ῗhZ+p\N"vQ@\Md>vS~/ |/h,2Z mܿR-&a٨|9 ?b]Tsrw_U\񸲬G`y8⧥\u~Z̬p.8K~*Vٖ0$#׃˗xAKW]q[d85$1*Tkd0Y21=ӊHPf=/=)ν]wkJH T-qID5IYSW/)Ն͒LdX f&"vjm6*0TedyW,6v$.#9߾9χBp_LT_عQPqBeK߅-CfFۓ ;ǔJ;Rn焈BuEjUia/'6)p(ը ;-!*3TΙ 4@^fEw/+*m] cႉ9ݘY:)/njԡZ3C0,7oP$7/ܑQ [~7z zyYCᆶP&39];|sZxAs bE':JYckŋoiyCG\Z+NO.-#/]AKrwOoEZjeێf6;İFxEl!dJඣ PRJC>"u k-A}5`Lwotuoz[]ok DڢmT޶&@  &&B34.fABכLڃECP ^@jdjS0h'Ko,T]pXLH@HEO7X\kEM=WPa3a)(H0Ciaqbd$JpHeBFeS*Ie6=1vkB+ K9*`)(|Stf"i,6$Bv » rdR͔ (QP¹ pɥhB )I*Dhs~.*$ڕp wYB־.8$t=bK Yr3Irnmdd: GfYUhtI"I^t-$ϮFi^a.`Weg Ngy rjQhcuE*sԚݨ$"^;xRza"'r[=^@@T1CfT 0h]NzGNjV&&.Z{VSoIw.u|͆M$i F0RTol{Ӝ=U灆B{^s0I֢xZvb>7ϕm:(z[\UhǐS84R5\$aֆ] "[?Ou0oйźJ '\`#2P _'QhZU@]ER#E~?;`¢D`s].ygm]{Y˭r*%6d~1.en{9ސ %-Mc舲3?⾻'wrĊ Ӄ^p_u6Ww*D0 B{tc3))xp1pdoabyb:K L$ <(љLYFXDybIPJ @bc`!3 ͌e*!/.FȎrmzi(ZoFp]66{\ ++lI LЪV}((hz|=HfMݱC?rS:L#?X|ő 6/4tŘM&1ÈfM7fgSr"`KǑU`rVDh!#H9eq*/K}]Zt%=2 )Ҙ6QIn'e2dk-PMO(PTDNxrB=J!b3:`TP1dʀ Z"l2,(2EpPBysD_Ns2H  Jy8I0B- @M,mp܄! 
ph\Γ;-"4z,-o}zeoi)2wU/M2ڲǩ>NЕo{GNBʤ**^*ءd |Fc|k+ߡcUymLB9{kqh_Qp4\@8/;x4=,O{{TC FOڜ4(ivD)r g*%fBֲrSqFқ"M" |ɺ)FPvv-sZx Tu@}T*n/ _y|Kjܯ4az;{z7H3ϛz=K> /Qqӗ'ͭp6_,j4B5oo\0N :xdj~2 Q{!3\ lSrf Rғdcl l[{,!Rqޞ Mu=ntm %*A[Fi=uD3xg/@[qW'sה[K~,zNV6}5g⺇?.|2?3gОC{V Y!\_.E96٠sr*I2B8klvN+*ht#ps9O+(+pQw'7'Ȱ]ēZ/eƢ|:9S%Nj!Y+d~UlͰަ<,E9'{"ɛN>fePlyuq! uZ8vH9`D{i1˼p',:_#pU_A߃aki%qh_/bM=?L4T{Sv ƉUuھkGkbB\WzXnq6 %UbŅ1d0I(Zջ % \!ꌡRTfWkGhښAzw5ӹ{'cFq.+xKmXl`nh״$֊CG<77ĺyt)&ILή5xxvNi5p@q>XGй1`UVt+(]⑧ ,EŬÚ.,/+\rFѤ8D"v !sb ,2L0 |֘sفٶǓId!+.(`V!(@ )pzaZSk x~GlBlArm_UB#X;5I}6:D2#y~|@G^f2J,HSXB3Luʒ'hX}諰=`6m+f)8ֺjrubckAt^iTtԥbLjQ]*֗RڋKVj \..JZuaJ#u[>*Aۧzrj,ә:lOGW.YJKOZYG StGbb${ϐmi)Vv]{V;ܒ8ٌ}aszbEuEsvq!U\7|x}5|g+#Okz'Qy,|LD§E^2=6,q9d;>Z-##I d",CXE?VbF,_x)Cʓ$7PL<ЊVo1J޿UoO!EC*hV!R;T6,gfT kfw#&T\3I&?#V螵d@\4I#GF6Ύ4l2sC,ip)\3&yn8y5WzZ넝\AJ6ٯ ׇdvbl;`BDlgy눝{M]',Z0G^z} 0["H%B"ϝY=ywm~#=HY}|_r*@`a( Xd &G$p-](ч9'XJ,Vmnm{5=6A;wCuluRXSW(+8[ -F>-r&5dbxU! IgKI Cާ ȋNGhl6q9J͊m@4FdjSƘrHQ@ vKU^&h*}W%,ܜȗFVc:dLbAv`L#h!,cgF޶RI_o71k\vdQa;_[;K4$ښolIq? 2 [r%*qD [IUNl FQA,I8̖QKh`KMvF-C6򟙐\3Ŏr2RdեԥZ"9ZLa[_i]j0mė9j-,DZLb3_~#oGK "$s(ʣocLH(N;ICbbDqFje' Y 1ƐӅ1P%o!A^]a~#oGm;P1 Wz E#K?}o&.) +~bTWϟY*(5-Hc.IDcжFww)!Oa"aJ#D|] ,G:Ckbx J9 ( 쇻xx~٪nѻIO1}Gm^!;tnk8)o]=vߵE,6;t/bAtǕ.ND2h7wk-{يsNcߋP 8Guw@'Jz7wMѴޏU~{ M!;X;Ku;yeԤ{sN܇X f.ΥD\#Ƣw5BuXy]o!D;<nxք/ Ao>\D,(AsWv.y2|"['m.m!\|lMQ[=hnСYB4nKj:+ؗ G*dB$ngauӓQw%Vzth۷T3\2|1sFeFk1\Ew >Իf40c8 %0f5r~al2 8D}'6ń[AkBiee,۠SӢߧB'"؛8'aqHJjņZ7 G+@byR0ECҀ7/brg׆L!0 g8xpaƀsT~:_Bdld5;? <xX'k+kjcIEÝ;w,QB> 6u|Bѵ%f-f91f$L7-k8PwUV._eVVU22u< #؇JSDz1!@D#.1XNJspdBrbfq[,>D &Eck `pG`:GVi#f EFFHhclsV+F[.}W.v,j.sa0JA4"cXD"9- v)Wg%@ .ѼX>RQ?QL%.L 99(R\ɥ7] BWתܝRUPnԡ^SSƜ #ݧ#1S6ұHaEcO@ecbieXy>1dbdؙW7Sa"kɽMX@b83_f\6 pPMuTNPMB) prJP;a@H-$Jmtҍz` Dl' yv0\Q7[6 :ayhm9 *Fʩ4U)l [#ERW3qW'bvn~|Q:7V_R[(x\33?`\k扥,V2_nsc{W)F_TC9/\\ n 8<Wa%Az'.$QwMA$ d^_ZSKN=NTq(]B~ T"(B,<.Z<Iyd\12*ؘb+ }؟5t].f4X~;q2g/H%<W;62L%xۂ`HJcQ>@1~J-^s.x;ZDAw`q[9^/(I)Ao#z=Gb?v]ՠ]ՠ]ՠ]q?ILEq 4g`sKXYQ"d,8 È!q*J}&-oGΤ~K!=3,|wi%9)>yOldZ0,C9˰[Y=g:g(hO }b H0H18r _"ziT\TR%'RV ːI%LwJ)x"J1Ť,d{fz&M"KB -$m6M̕U:Q2%`w4 _i>4Vjzk|$?bDJr~1`w #sp j`zrz'\kMᮿ4Aw @`^va] סfe]+s EQQ_VY'~'?K!`}<SfN*NԄEέ*2(TT88.>|['I/IeL= QnSIWͅ(} e]d&쿷 8cM`.ĐzA;{0keW!G r=n"X!GbPTKi*Ԕ1$| -4Kȧ4/_m;/sYPEb%'c _޵R K5/ 2~d/RBG 㙃~oE$=DP)ikq!$$Œ>\GvdeTC+қjIbk*DJᰢHᤐ1b CDX`"dC86AT}Lx=0`Ш be.avoOZQ:pqxZTت  khAykYZ\3.!/ARo;O$fEg7?ԁʮPx& hr9ة[b&/60[^f_eٯDs{}~̈*D9'Ul&80 ?,GYx ~jg8+:6]#N?K —{u,|( [\wQcO搨(#F{a=zCu{!V/i߇%&̻̻K ,ҔKҏDSJcguFVp2P1t-t~nï3KyrAay|(ZNܛZ}V xŏU θ7$*2T0*C|6k~лi(:U5KZX +?zJB:h46w?8MEu;v>h["Yl۝F)I2ynaAtԦ߭w݌zrp==\G:vׇv d]_~>~oσd$+[;?W?$:~X5Yjo^YWX[^_Y~7j`t'7G6՟/S^ ۫=j_ng[ߩo_mFol֚k]l3~zBvךfUy@w/>_Kiwգ]dbRwf6y}>j6NkجXzr@}bN#ڮ_mף5qھؾ:O_/v*eٱos{Eypi<8|<>:~:}>w6>۩졋rն*Nejg]m<;Z=?7N;rk~^;= nI!nMQ?[rp隍ӓõciZiG:>M?9mA?G݆ i sJOfbLJOT[ۤ~|A:>fs F?B0}iNOQQ?Z6v>ʊ>|o\٭ˏsޑyzht[r}c~9/W+nU9?3h3_މ/uV߹ۿBafѾP"@'^( &6/!R1P k7l::G:Kւr䀯oGU"~M[fئ{|}y}}Uu|F6Ol~X+f}H~j1utKssK6KGqN.kk߻9F}'?re/G|tp$>Rtʮ?qvmSW?~;k8?tvsSKT>n7v!XmpuY͵6=ZY;y3DW_WrzZ[$ ŏzj-57GlVퟥ~u}d.JenvjmҷJ8]ۮu!?/ӛow!.:k7A{;+hhQX'7u%T[Vv._X?<]_Y=g߾9 p]iX[QUZwۨ4FMRUUjng}?ޓبq;9Ʈ'ӾNBM<ِ oÖ~[^MUm kt雽v}dX2i~Nb0&P+.쮻k]mkP $4? ~~\{Hgv}^ pó1  d`"FY6mQPmoزS_t7ZW QB}ɷaWv5MmoX6sSՊ^X6ЊEAe{)q5PWGEcl^ōY\9 MByօ>_l~t\:qW>yY?߭[ <{ټfHUU+M ND2mQo_-u̠Aԯi,ǿߚbQ7kYt>e='s:? 
ܧ$$|Kn)o/CZLRIy{瓝q1"?* B=sWߕ"\`+|w]&eK+=]ZY&T,w&e}4c&sݹ3Q:nmMn>3ˍ~)ʿO|)F+؟I.8K_q=0jhT@QQh :(P1 RI u(J ;*NWl2^[QD},գh"9{S+7Ś?PPT+K0NːQ"I\tef˺/.-l*Q%4PfII{SgLd hU pd+UO+{{r/쎸E(ΤHeN9 ka.kٱ 5~ 7G0o0&LS2w12 QE0: #l*05;gv+#F%CV"%LXƀ!j0a!fH#c*=鉉bnD바b`au]2f.,bhr,rj2>(9y[0CD22Acg^w+K e>_uik4LMIIK^1b4deI_d6VH`lɂHDmR*d4k 9TؕJ )$#/K Z3Z`5.F“,$F#E\eC#R t$LjK^i%P#?WUm2bqt) h~!nN< `J ^\t{[TV%2s8Z@W[q:*7n okSdIGr@o'qדƁRX!8txb@jN 8{@HR3>b24 k2И e児tSrGgg(1 }4+5 (S&%߷ֱnTJuk'L203DYkp.$DG((V㐇 ͇ .ߥj VS=fRFx?7ny7E4?Uɞjj}Me/4#Z$‘  V >v^O_F8IO*V!tޣ;:غ~_ e&^H~xQj?E!S6ZD#H"e BM@C` dWFU!P܇2L{No?Pny-hn|˺|7x'u?*ɿOaYJ9$ URe9]L'[ )]gCf)JM!bI_u  i]}qWpC+ݍT3;w*u1~^FnaDt,2IH'W8rt]82=qQ F!#3qTɬ)d(/{ML^-IA.\DM+GrgwFO2 {ZJ܆6e"hARa6i3aai(FmyƩ3wdj>b'PA-="}GYt<]=&^ M UB+`pQ)/FJY,CfAĬ2ʣiVzFPıRJ ` @HB#X1`HQo4'e9g@Ά "1ZFQ&g eU샏K!VBXwФ@ 9da*2+cs5 А^Uݕ[_OeEJ#ͪź 5II7X脼6ج>=!Ur]1i{m֗fV ;\-M=lƏl͌x b ))tv[ƹ$äF{=^$lXQ$!/\Do%k7,h4Ȣݦh"v˟݊.djԀ Z}ڍ2-/ |D';h)z=!gn+,xj"$䅋2EYnr2Μ8-UC大}1Wo\ƟQYgϘ{X v^Xm=C<eDϩ'32 -xVkfFw'(g9==Ao?KMf.H 2/wrF!>'(g 0%i.1S&SFAb[WJo*Y.m'Ȕ"Qp66dc8ɸS"x U'j4. %8) R(Y\nadLeŋc^,TNifysW[~,~e|B_ZVgbKTacux c"_GS׀d1Wݫ3DkTϐY 2p%J崝tx!dhbs{t!>A !BXp ¡0 !҈(E`4"ڑAz1 ɴ] RB"Cԅ"h 8$JC@ 1b rx]F$%~XnjgM6"9yƐ*,5'B PjC# )@R&[P:(<0#g"0e.Vcnćl$32g5 H" Z6pQ(mw@`B*)68c–}$,S3z+PkNDq5] b%1|eb$CH~n"fS:V툾uCwwQZ>:a.7ϪmN웏·`xTEp>9Us-2Ajt҇`0:J/^8t)w~B'X.WhQD%‰UєV*\ 4'Xi:yɣWEKeHukZ/ =}axC2.i2DЍ $bi_rxv^vbbj/:Tx_wc_nHjљs' ǰ^^ez FXbc"kzL@PT,`H% 4L#Jd#srzp6yjW é1=A/ҕS&zA&7{aSiGR9Ys2b fJmk)h7: ?[]|tNuqgݨKTXRud,"iM]v DVC "D$",_CΑ )6{1N[wE s~fSxN޼@PRPe{ ʢE"DQ1 dH!L0"(D%|L-CV}%EǔxnRA@8}zous36H) HqϠ 3l3>("O͸/`\e]8 &ZTH .BTXQĤЮL*CQhL3: MUx"V&T7@9'zmj2BL`6ôjS</;itZf_>DKn+ (M:X_DB9X#) e Cr,55[qI>gRN^6gty X,B\Ա]i{LFO(e2D񬭿.'1?V5uvF 4g7JT`AM+1P= xyE'e@аiHFy<#$tܙŀ$7Ezd1@Db{BAF ']bt M{2KVzfU^`r#S0ya콊r@ .djT6_Yb΂vK,m֌)\m<[E4Lٷ8b[^Nwdn"Xڷv_ES!!/\DG)ʠNl~GubaH'6Y ɲ` i#+CuP <0(X( BDl%Kgzȑ_X<@o;- <-NWI~tU)92벥Reb0d|VIPqT "p?ykC-]Vݑ$,Z.P9>Z[uҋkV3RR;y$+ YN)@b?䭆xa5*UձV2[cm6Sm/,GFeH7;)9`xsAEPriue?/qכq+; 7Hvi\_sShzǨH!~EL,!O/ļں"O! ϗ~BUA\͹9pϛ&m_6 BA%(]lα|^+΍/_XR\R|S_%K2F*~5:,VOۺd0p{YN[G VitG%.pV?1cB~($"˲S DphE~iwn7/BNo $kr4eŇmb8YFEpsRFn\i魯%duԱ ,Ͱ~Ӭ{ Gw oL8r,{'s<k.s==̮l lA/ݏ -O5=ؽ P$uצ8{c(>&3$5ߣFB@Qqsn,Yvj8ބyD9:]T"Oc(wk~u>=ؐ㚰[7OmvZهX46A;>F6.zSmu_+TDPtA}@v477ϦĂGAưDʽ_z7G16T]Lgwn;*ݚwwB~pdSj#&xcSmm vE8!3ջa!?>)hS0ѵO~ҽD֯t KJk32Lrc` Lr+WIMރy52ywKzWloN?{OjmܺX=Wz3\ ;$BpPq@bC yLgl NgۆʺϯnXX'[^I_C~oG٘-ha_93Ӝ WxҜW`-" JE4rj~ Y~'V[TkML٥ֳ4Eɑs( {h*Do:2o&\ݧgEw^_'Of":EߝFg˒Q8CnP,zxIBd$;.]74Й9lUB L~ -Q*Q̋ڷ4Xݚ_T~wf;ʯƯ1@B>}4s"j Ĝv)*H7Y;"&k1RPt_d\ާBt)&4;3ݗiR-y:SX %nR?d&GnYMANݫO`@6񈅉I֐CnȞsr M.&S{'@+#!,*Ĵrd Weԉaw~yq>pruuFy6 uR>rG͔j+)voha:j;K6<yGp!H"e3Hh5k;=؋З~B5rz[tTHxq$@.h ![%d6>H NAHs.$ (rNrSP ЙsiN8Aɢ㹐wYI6FO%W@N\$I>)զ#fb ȧiD, f'4$V${7K%9IqF޸w6U<˳Xޱt mlMꏟ 23{r'gmj7\HЭ}.7N74wv(ўG ת~S nf֐=|7]` шugū&ЄR400-9J܁گra RKkDĘ#,/6y1+ȃ>9$u^rI8]VG^ oWn ^90ݪt`Y_fT/FJ%"cDd2+b@=ySrztw+EV|R)sE i4"@Ҕ?A2G x"eܡd7HJ|<~{=[VR-uۺŎt+ʟ@?n"~ƇʷV c|1įߧO]\FriGWr͝>=?'N %(Q@<}t?aℶdV9|O1/ "\N*cmU=V? 
kgr&};j?j"uNUݱcA5%^^uǚD8AzZeҌ6]{'|(pZ,,~x4{dPΠ02cК18<-D#kߜh6ё,;2U(L&L.3C6:;#Ȋ& 8o}ב}7r886a`m臽39d[P3\2&>VEc@a0Nq@g8k{qȜ։BRYv\"ri 7]KBʾd\gF{ݘ@ :Nj듩&jrtN k DOnulhb~BSUːP+U82gzXa"s# K';&hBs9(,9n}`6>]f>bP܅Q1FCMql-P%O]H4:jh W؃ED/]^|(7STBN hE4ORJ"t4|K"rN̴&c,GCt_OC Ju DC␙ 1(e)5ohmc<7F Ba N3$Sr:+ pwdv#XGC()lyN.KBQȨU#\ ?ɔKcҊYUȅ*LYb=1<\HȁˢǴ | RoN&S| QOZRRg}hB2bmqVudY2O6F¨v.V-3gMу6{>&1<1 |e $9T(0$ 1SED[Dvm:m.duWȠ%ș˕Cmy'=x̛lԖš22 rpp6,ٶWM?m^mCuFV+EI$pĺBU {wbJVG>e%=AK֯v7_!Fw_ˋg ݕk~:3 g4]uPPΑVC ‰_1e`<9`F_pFXH:Z]mFcIeg2똤Hq1F 7Fl;ʈ~3:t>"hw H2qntBMH|Wӻ~s'Zqi h1#yfRl0Znm68^uUEmw+jŠ& AΞlL!_1?Ek mFQs&aq`eGmd]3n|JJr]'N\I2 \A`ֆC9SZI23y6!,@ІȤ#1t)UrFk %NLKqܕ&rt=P\[)"{fq|`H2!zwF[*thM<5L;sk&$o{09l  w*O8xUppg J]M^_L12y׳a &_ D`DH@L0Vօ/暓1Z7zvmFP0tɶ/.r>\E`>Ph[z$)NмDVV_;p)?ޮ \!.!).(ߠ$LFr<MgԱp܈섘j̦Q@kMY]mY ls2.s%Maz9xZ826u- O'ƐU[Z\z.0MUKs<͒}^6V݊EkƐMg}݅|u!15q I"ޓh mDu;nFIZՖ\:Պ@+\Z&G*gVmn1d&P\K5S)(oIͶ/n޺]k>:KsyV(>ӻ健{R걔Rà,}קw{ ZKc(vciḱRX ^Kҽf4X*Mf)g,}קzmU{R}jRߕ)c>k=7K9Rʖ;K^#,k;7?WfLwneK`j_Kcik=3K1Pߘw!.}קw{mu}d)cXq)Kk>%\+(ױA L|}ǽ,Ȓy(vT6yg_.>}-6RO^OPΡR&턄$h`[A <)9XRP]0Zh|n2 <iVJ+A[czw.^ٟb9?Ɇt\.N_q=myr^qo.{8N{9Kr ᤁwh67%C>͔9_ |ˍ&Alv~bQ3PMUE~.؁[~;!+J<~g5ur>Ru |mo'dS@@%lUƉ8I|5 ŇVtHJNl9Y.p}k<߈Uƽ1goZzZX* bZF#V5`M-C:lDC?:n֚/UJzg3=|ͨf^VX2Acr)`HqG. o\|6 YBJ/-RlOZJs9GWӘNS=.uVVOq>\lB!S6-PΑS) eq44RoJEJTK jzq5R9c׌U{D1v16ԅX{|0]wR۴gBr2ZC1'Ngm^.q9S7M~{_4]NMZ-i$xlLu@<W`7U?29KȾ9M'a;z'0+>5bvE#R_VpOʽ8"eʆ{GH1& -\ @u 9sNerr/Jk]فosS*b;WQ/B'\^؋1EkwXOQQԕh1Jm'k9 l1*0٩5(6M=H c`H {2n_欿RخǓ!aG8Muc+^+`XΚWd(Q.5b}Zxٛ/Q )e/-(N/5҉zkwglZ rH; h❔qBy(>CsTQngQoUKQQwqF6x:{/K3Z7vgBsqԁ ȄKz?K[RSӕɬfvIaN'F1 6-xK~_ \5Įk/^osEfzuuߟ\ytL{%m3|<>_Hraau>f@D|QHL,Sy%u8& uD)Y琉Rjo1M_t[F@%:wې([UѣQ$zj,?>ߟYl|hfG;:*P"i"Ad ,Pgc(@d( :pk˟f_V )-`UHKeTi!!T%q2/T >T8n+3<.ܮ%7_Md(K+~o:4#@e5 hXO'F%E&F"OE,x?)JFL-Y#H)vĤL%Ycci642gMGFb\nwZ.aD^Xoq b_puy+rgwjv  bݢw?Ϙ>03Tؿ}ikM?:żXvrYԟ PVdtSbT$bb <2LyGyZg6RуSZjٰf 1ji^pUsڸh^`QP)9-1Bhe\l#F~1bJk(P褷qR-FƒHܚ$ L1 YK|A2TergjkRJV9/ ٝTJwrMn*AMmh 4+2kyW>; lB6=j뜡Ѳp6c@s|`^WT9nI :YCF8(E|@_ZVMEWj,!Չ>@%OlfqoA>DYL`$Ps\m-& `s63@%`G6ɷ6 ׀r٣N=py.x+(E}ѳ$(k O#b]0͟]ݾ;r֊njxfeZ8B]!p: gM.F|'kH !)XwC떯Lк:(cݺ$V֬4GVBq` S{|K \2֭4תiͨu!8D0%1%&\pNTUpnSUp.o(c7ȩo}n"5mK:wnSGD*.DC2a%X iW\}x!9M[?py 6ڝtQ7mjq>bV^\>VC2|1[UmSr'[*=,-اrUV&ΥӭkZJKi5t(STV8.愠xnL?,gqj{뼑Rc{*]aU1@C`'6Z )M ]SbiViR sђL?в#gW> y98tExYl:?7Y >  6QOh-FO<,fEiCϓq8YU,ȵPdTM"$xYѱ '8(&"RItD(|M.sD,Kqmi(|)`CQuC >WRhvD0p F 'BBa-Mo``Mb (kJ`N@ 0\ޫS-ժ |b)I{ju%BB6SZRR"H P(0nt^ Hbl"FN5{`G2GA}]{T+{Q\)Eo*̞N5Wz; ŵ5 0 w`ݰh+~uΕ*[nތZ:C SBU\#ŵqAfIIΓ;O* B+U^2Ǿ$tOX#l?P`5ΓMѮttdom")-@XI<&SN%ah@DGʬIPj> oߨLmَvYk]$JXvop uq /eSdOg5ZqߏV_t}zgWrP,6,gky#$.%?Z=؋dolqBς_]p݇o+SRPrؿc%%EwQ(wΖ$yl(AUs$@c"@H! 
(gƦt")R, k dH Ek)Uݩ?Z)961Bgs R/Pz}j@ R ővgd䇻s$όgD_obh>A4<$mD3=r4ph a%6gmn>s :wC'ܹy&_$qToWMjazԢC\ǞcOQuRS]Y8<׼V(gZ*%eGgt{sӵ"I:LQ6( YHKu$,ߤ'j٨P[+ u4 w:`Fpn$AYq]H6zq!I3XC0lbx1SΞh.ۛT}t=ݺɨeR)*ABAjjDYVN{KII0GI ppkϞ,59ڕP|m;4 jYHb ٣AꛮPƏrK0)0z0F' 8f3Xi,L!8֧0#mm Mx/NQ}O5%9!o:D)fWJ/Z%;IߧՅq72(e )^6JpC)O[!!zR.PRl%T8s 8uP3n$<<D# %c5 < "#124@=b>1߆jƕE-Oѓ^-PJ/qH'ʐJb l2R&-J{g(=E>Ռ!{(9;1V}?ZaǿlbRRI/PJS9Cd@E(7F˹j.Qz}9U (D2Rd?l)s-BaǿlrRN10( )ՌRuv|BߣVf ^6J vC)ΞJ vCiJ5C̅7R ,i/PJJivbQʘJYR4p:QʥJyz{4Ts.`R} Js/7L&QJJI(% )Ռ!ƿlRR^27T|3RtǧQф2B%4QpҒCJ̵" Qo8UG :(R5#Ql;}8q>7\Qsub9Qe qm,SBc`V 2gp3q 6(' iiFi}{W;y^\_c"'7JjuW7xFh1}"mh@$O+_36`~!:S0y{5{WO_kQKi糧bDҹ&*{Ii= ?h9wM=`ϵ࿐6Lmَv~0TBu(ml>3+iJ섈)B=G۔[О..K5~Iٿ/^E~n zC|LbbDvPm= s^̬4`p5G |yOx{ 7VH#6G Lu)2[7o߿w5X`Xk^/lՒr#vޒ9 5NfuɱyX$V S1fA rC)5Xr!M Vl`W*H],e5=_}BЄ$zh;f,azTrA$E  $̠0IfjCjT'(H> h"K"FΔY}fwv0(Cg3v!+G&g$A0ihH8Q1L!c!A$R//&YMWdB&S7D (4$l}7).%Z2CPvcƅJA$$#pM8$aDQIqpԼ1}n•Hwer?nrˉ@\ɭCQj"Fu CTHD(B!ya8aL 1VZ_#C$߽O2ɍH+,rhj%Wʇ{|3FnK4{ }!υgJQ]Dٽ~޸ !#L}?7CkKfyζW&.on̔[/KyrltybLPv!d0-5}ض9{-B .7"Kn{Zѵ-zqqp~L4ߒs~|8&;\p}5Hh!7ã?\W3y:_̾rbo_SݖibK$i6yOy,6tZ11j3Ǎ֞Q[i&U}McX!bl%LmS2D5KO˱Ls>&\ 1Uwuz}pW0WtMVBW7Dh$3)ooztw%P!@1vtQK5Kژ{pC+F^m?+`ۻn_=d&=,̬x{ =f=B~4n@]~xO41~#вK aMUw"}t(ncOZQ$ԡheKMrYb_ָ}nZ> ?)sN{sa Y,Ok_w.@6[Pu1B+꽦@*-~TTwٮN$+,,@| &r>a%w~VŚ)GV Z?)>.KT6EmBLkڏ~p79.nx|oWŗhvq=/pyUwOyN9XSD)v .`6_(,/5u+`~wa<@֞' K9okXPD)POa $%G+Hς )JISP%/8HEŤs4P&/ZOOu8A 8♎F1jPED4P٩e&4@)0K|Ԏw$!.і{SE(GYU:xIn]Y,^F*b#vJ }IRA]\rs1s@&_^ - bW6e!IzJwl#* k ZGgO{(%S>g^guՏBT)jTn5x{C5['lj&?oZfhy n- Z.^>;:Ũ)pZC>m+H̻ja H64yp?,t`>m{yCDJ˧=aHhϣQUr(-b-_T;w]TTSA}^T}^Q2tXkEcmCKǽ;0p-tR BՑy5M R-*oF^ɎeEd&5>P a>fw7SǢf6sZt5_űN<OSW-/\O:4\ {($wtOa'2B:K"v;%Z&$֮ 9T ("(!؄Ene{1jh`]RX-KpMZK޹?6Ub @e(6zUu ^=ҨCqd"m0!RK"_). Oun򻶨~Z8Vcnr_Ű@hJW5_Gy[VRŊhZ6wR#I{ѭsoT(/WaR Zn[X=5R~ ]լVBXY @k/ڪ٨r8fa L'RHՁOy;HaZ*tD u0S4So6bLU*RqSlϺJ!AM놥s[|*֭ÃnϺG{Z:)z4CׁJ*)Q {L0,wڸx8y S)KϚ.Jvq%?ۯȝdϣ4wK2{ĶFV{dߕ^>b\`9-l _4yL/TR"6}4,(ܚd􇩕rf[Zs0F]*DɢԵYEݣ_>*`K|V晚gd TL`k'%Q{[{3^Ef4X'X8ޛ𕔬g7Dxi|PτJjJ.Lτ+IoP(ԏꃔ4Q~Qˊ[?ԓP0Lb02BdZQfXiuS$Nh1&O.~|2n;^_xUg ̄Ʀ5zb%pc%ZX,&XHMH)vvgwb{mK'lb0a~3:|湋O,4?Oc5G,,Fɯ]vxC揼s,_٫DvO><ӧa[==Q+q.UU`PghdmxiPFQ}d (2jh8cڿ4Ҍϸ\ITi"Ԥ)!Ž:XRO6 6d)1"cdkf)di.5Jyv+N@X .]&z}RO6kə'RiKJd?V|iKs,g:Yʏ^tԓuf0ԍmKK=YiřRXʸ~~'Rk g4KA KA49;f_a7αXYOɺH,=i*S}7ʹKRj(738YJKi/W 뒥T4b+Nvss{]rpRF)y,-̥Jf`',(D]<Ŧ? ϧoo:ڝMb]j, ʂ@(Oa $󼮿NJA`I SN+k@gPIT籩T=quzST1xʩϭoQX,w+#?b1Nϣ_ q,m+LM|3 J-ދ*{/߿p5vIP[[>hfsgNٷ[ROtuaHTYz x q&f( my+> d¬q$,D~= W:ny,x"W m/6΀CU[aj{³6 !FF8i:[0ByKK0ΟPR7)%X9/3z.5"=v*~,e=a)~,ͥ,=mz]sVi z}Zpuns,Ek1 ti{Rd~,ż\cSf)q*z.,_YY%qd;8E U$YT)!Ky($h)䐱$?i*fMj*hi<.Ǎ%4 ݈UT$V52]Ab%Ii@gڟ#2r9,F7U֥Rq.7Ya^{@p[_i5@RVUiDA+ {. .%7d9Mog1JuB'wrX oG{Ȝ|tXӮ|G-BNsOOKG7|S%{]^0[(;CZwĂsp2p{W1/>~/2_Yua9%GÎb2<{cAλ{lNUt^K~ex=O$O5LW}|ՠ+P;EBm#х1UCۀ$F }#~jlbMpKlbQicX1`j{*URSyזKJYQJTy/Y m LiT_4ͨ%5 S#RVEUeLe5JIRE"L(JS*@؜ hkvE4UxHZW1~ȗ(&?]޾+oq䟳;JWL/vt?.>[S)YLyN9̅lT \i%.+ȫ5\ YAŦhL۰ T)P;}`=N xNsAaIKFB+ .V$pBht[dJy).v\T YFV\ ξ~$.ݱCH+Փq^JU,_ay355$>?W-??J~VJwE.|ىBNoOة?,,%G-ߩ@oO Z[#N(-L., S32H,drO|+?mTR?z~r{@~B[oAV6@`D-dƫxP Pid:TقSA Z Z5H=PX.쉣KI|Œ5(R-YC.E藨YF;k3kq4r@֐H_= Gr3Rka<> 5Px+ty :츱MT<.FV MmbZh!9l螢Rk>Hu܄^=>:WԚbO9k&q^j-JgUmIyws5YO+[ǧݻ)8_*e>'п1bbuo&M_gVę'ݵW=?'53{\7KIjD r6ki"0ݖܨa$m֋(ާQ bYj2zgQjoFrfx=GvRu飱DoMX)uhjRZO|SWL<-*Hn]onǐۿJ5娩̲` "%12*d8lCVxjߣcK8Cam~_  ISUGN>s33oIZ3 Z! 
8BZ?˹BJ5&MkynJQ%N-$~gicNr\.ɋtْKrf&*`g{ RkDUGG 7HU crK0XVB%9SܚaS)26h<а-}:Ly bpcN\˴H U1JI@Uc`_=ݾ :P C {!@5*mr ȭfT;9I jy.?Gֲ/3~PIQkKwVJ'$ HWsɞ݀HsDirnSְ C' WMJ~P"]&Xяϯ13RhACtMvS *DCۓb@ڡ4n0)Pr5]*+shE Wo\oN; O'(ʓz"<_W(85؂@|gШxغ:Ooc ZK0}8a^kn5V TT a0o0 K [W2m>fvb0=LcQR1 ]j-Ԉz'k6RFoHvtO:b |F#2S).: %!Qrr1zû;>wcEp-0лuw(L=Rrsӻ9i{n:N3x}V`i82]ޭ sabz5cBl O R3Фl-+Ef>>|m}C0^ k.Y:WPVEH &cT-֚c|Un3V9"Ղ&8ÚX <w4S틤Й(Ёy:zoVkՖ1p0}v1:OlbQg>i5{fBrd T$pүѶd^n/5jYCa{R;9HD>_p6@" Dg͡|= C4Sg FG>wcvhpޭ s`ʶ90n([B&mUXn ޭ sF` 82h3ZfJpI .X;Dm?Bo 񥕦}7*5k(y?S͑D!rbTufXo}f& ]q_mx<ˋOHef'OqüӇ"pRF:)M,BԦc]*`VdghZ\,-p 6e`2+lmtm+iU[@-<#{ t>vvodW'8~W:!;D(bHR_ BU <UU=ܔެ0gsv;l"'8`mnR#MnNu%YԒ.4篥5%pߐ֋>@QJ!j@KRK>qD4lӻm(P * ZqDATSJ5J5ZksRta(E-q(}Iu9QG0JC,:R#a(}Yu:D!4S_WUO}TQJ+QEJA9 jJja(E(N/I}!8pQ*Eru7䥯H}.&(=hJ CDoo4 J Ci%5ǵFiPT]QjAARjtX6JRC@)0VR#Q" 2i1 Ci%v8u(B8ƯQTO/K}.6&G0*}TRJR;q69l*ReZO$zeRvA([ץvYCG)0AƑ Ci%CF:A_#1cYu~Ҡ%uU7T(YJ(=lT:k(P : ^j'Pg:0*;Ju`į}!NEPUh /K}._z( QR]W=JHm7&8qP$+Mi$CRBrɘ7TRj)ҕ6Ef(B öW JU+DwXRo/XRs{WY/x30 dNMft"_V'E[jmUG`2-*]}̃׷S@ɽ"ċʝl5=ZfvU~xC #>pe5͋uYvs3옵Iy#1Sv X.ttu6EԎn"cCː(rZ|ZD-TGqFำrwxXnH>\mY6ʩzwsj}0@{йl`D/N%v V˥jk*͏PN~ P6 32+Żlf,8/v  A4σhWEs_%\ySԪR;P:# &3sR,+ے3l6 '[ ٦+5 M'Wg0/zxM:x4Tf*T{P:6eěEaB`ByyPU$ﱬ(|[8%bt-pse{G H ?~0JPՎI}g8tPP\lg橳ٟM<[j|vX}uev˷egaB0!_ۮ^H; M癡*r+Ў9-,1 Z6ˑGlHE* ub{ͺ&/W.F2C `̼-#w2M pC9Eds+ *#Dn54aZZMӸ>_dC;uV [[nkG+[%.ݫް"{6zOw?}u(Ǐ{yowpG #L&&U//>~ F>ܖvNKqߞ] :wA"Y}xHB}<5j \6`s{y лxE}JJ F6,ݜ/wKq)W ^%XBF|3z2[ q[ ^q4͚-8]g ʠH·&@q4`G+iCP& c7 ݗpvw.7ou.d5#mFAJfsv*AY4Ytx"b&jgW*ˋ0ڇك-&YPHl !~zrw)FQQi 0_m3n,0Ԋ<.Ĩg`ov­tu9DG$xKg}6%ozC ?t}zW\.%.AfQp)w=ZP1`jR6aMdçK}xpY[IP&CG#!SI*មOٙ˅iV*)4xLV ؍_d].nȮ3H6VQOƑ8*iSTUA?P#= LqcAf i0¼%0}4r4UJH*FzSJ#rK @"J*]hB 9`3oIFW4˛ZC Le(@ 5u nqB9JκysπZS .r,<–GbS TJp5~zhh7꧶6{|%#L$L!7fG^o=gQm.G=x!}㟺~?箣crJt "?:AdL "mUjtF;͍4n?59}m'SB}:]Hh?dJ;hT0 ML ]`w(Q>b@vY&P5#DZ?$JdJ9!@K_\kODs1M6,Rə gJ+Phu/]DiTTJVydC*6c0EO)ipD*Cp ='\?wwP\\]&z UzN*f*́ܵ |qt nQ+)>VgR3>]ulV̳legO;%>@ znoV'Uۛ VIL}wTPNϗ=7zsrż+ƋĢE@nysO N &QDs7. 3'SPS"3MϢv#7 &x5~4bvgC0n8L,nʁ3B%pDfJ2&rEsӦYořK: ](4 ^+8&@4HqdOUs=NUO1GlnvgqGO{An!5::Dhdת_aVLܬ|77ۼ7isQOܬ["vWѹ'gq0]m('T|бڱH_Ϩ/LAͺ92ہ4jArѣը{->jEfl9E0JV蕑յMyH^[ Vqd7Xy}D4r\ݸTnvD\%v]P8>gYl)xeEp-%(]v5cޑ1 JYc{ؘ7b,#9Naz떫GP/{[>=w+̠:~YZ-~I7x(ZPi28} ё0a(4x BsEtCN#e&B{) ! 3d)-ŞhĈ8 i-%#nTysG`@9.\Be 9,JZטYAkhňFbHSA%PCg U:ψbށYn-BQP ULq3k(*{ȥ,vW5 RKi*f2yl5XkYb怢]2 >U(u@%O#YeG6Bi )ݢŽtz!Rs#E͐0-a'G>n5#Z\ z1U)aFO? {f)(̕g7EMxqݣ]tO;؁m{oE&3FB;#jc&(bf0- ?fh%T B. <9@UPbTB|zxvlᢝcuw~zrwcVr 뙩9djOk25nGm25G! (:x!Iz ^БV R9Ԯ,A_gϨe/Ik1!ZJ655E&ҏ1^EڨYY*"Z \ |@*4ujݢc.}t.};u%"x̥:%SyqFL=A=ddRk8Vi^ٕ_qʽ?wb%C2%%EY:*h0Rи\rZBZfhRoδ1w37lZbKu|XІv/jb8B@DC±GF" ` ԖK/š)Ր>iP|z_ QM^&c^$aʓLg]\[7hff%(IzjHzUr V2HօBwCJAyS- %1( YJɬK)VicYW=m"1nh%(dINHH)+5|%.}8o 2 p(d8S :Ei%|ɧ(Sgxͅb{f2$nfy^T}uonnKv6ދ?. 
ݜ2r&bݗI~ ~yy`-נqp>a\!Ŭ@M-BwJdG"Lh^ѵXphsEG8;ȃF4o!VTL\X @8L`3!M4נV< l7'WdW(#۔"Tt@ nR^ 13647ms (00:07:20.322) Jan 22 00:07:20 crc kubenswrapper[4829]: Trace[1572874777]: [13.647525332s] [13.647525332s] END Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.323941 4829 trace.go:236] Trace[380310818]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (22-Jan-2026 00:07:06.719) (total time: 13604ms): Jan 22 00:07:20 crc kubenswrapper[4829]: Trace[380310818]: ---"Objects listed" error: 13604ms (00:07:20.323) Jan 22 00:07:20 crc kubenswrapper[4829]: Trace[380310818]: [13.604373694s] [13.604373694s] END Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.324023 4829 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.325194 4829 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.326338 4829 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.329366 4829 trace.go:236] Trace[1655412799]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (22-Jan-2026 00:07:06.345) (total time: 13983ms): Jan 22 00:07:20 crc kubenswrapper[4829]: Trace[1655412799]: ---"Objects listed" error: 13983ms (00:07:20.329) Jan 22 00:07:20 crc kubenswrapper[4829]: Trace[1655412799]: [13.983691608s] [13.983691608s] END Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.329678 4829 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.330180 4829 trace.go:236] Trace[1407752585]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (22-Jan-2026 00:07:05.961) (total time: 14369ms): Jan 22 00:07:20 crc kubenswrapper[4829]: Trace[1407752585]: ---"Objects listed" error: 14368ms (00:07:20.330) Jan 22 00:07:20 crc kubenswrapper[4829]: Trace[1407752585]: [14.369056366s] [14.369056366s] END Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.330231 4829 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.341033 4829 kubelet_node_status.go:115] "Node was previously registered" node="crc" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.341367 4829 kubelet_node_status.go:79] "Successfully registered node" node="crc" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.343438 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.343480 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.343498 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.343526 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.343566 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:20Z","lastTransitionTime":"2026-01-22T00:07:20Z","reason":"KubeletNotReady","message":"container runtime 
network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.347013 4829 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 22 00:07:20 crc kubenswrapper[4829]: E0122 00:07:20.368211 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.373365 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.373489 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.373515 4829 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.373580 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.373608 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:20Z","lastTransitionTime":"2026-01-22T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:20 crc kubenswrapper[4829]: E0122 00:07:20.392211 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.395743 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.395776 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.395788 4829 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.395807 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.395828 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:20Z","lastTransitionTime":"2026-01-22T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:20 crc kubenswrapper[4829]: E0122 00:07:20.408078 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.411632 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.411743 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.411822 4829 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.411916 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.411992 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:20Z","lastTransitionTime":"2026-01-22T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:20 crc kubenswrapper[4829]: E0122 00:07:20.421486 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.427064 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.427108 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.427120 4829 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.427138 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.427154 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:20Z","lastTransitionTime":"2026-01-22T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:20 crc kubenswrapper[4829]: E0122 00:07:20.439919 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 00:07:20 crc kubenswrapper[4829]: E0122 00:07:20.440199 4829 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.441955 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.442010 4829 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.442026 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.442050 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.442073 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:20Z","lastTransitionTime":"2026-01-22T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.472704 4829 apiserver.go:52] "Watching apiserver" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.476435 4829 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.477076 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"] Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.477572 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.477605 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:07:20 crc kubenswrapper[4829]: E0122 00:07:20.477743 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.477859 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:20 crc kubenswrapper[4829]: E0122 00:07:20.477992 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.478206 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.478315 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.478392 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:20 crc kubenswrapper[4829]: E0122 00:07:20.478501 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.485053 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.485485 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.485926 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.485955 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.486170 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.486276 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.486558 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.486755 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.486926 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.491028 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 02:06:06.038239165 +0000 UTC Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.520131 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.526276 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.526317 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.526348 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 
00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.526375 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.526403 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.526425 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.526446 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.526481 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.526503 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.527626 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.528125 4829 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.528131 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.539042 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.541353 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.547113 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.547152 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.547164 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.547184 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.547197 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:20Z","lastTransitionTime":"2026-01-22T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.547177 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 00:07:20 crc kubenswrapper[4829]: E0122 00:07:20.549769 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 00:07:20 crc kubenswrapper[4829]: E0122 00:07:20.549798 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 00:07:20 crc kubenswrapper[4829]: E0122 00:07:20.549815 4829 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:20 crc kubenswrapper[4829]: E0122 00:07:20.549892 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. 
No retries permitted until 2026-01-22 00:07:21.049868034 +0000 UTC m=+19.086109946 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:20 crc kubenswrapper[4829]: E0122 00:07:20.550103 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 00:07:20 crc kubenswrapper[4829]: E0122 00:07:20.550131 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 00:07:20 crc kubenswrapper[4829]: E0122 00:07:20.550147 4829 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:20 crc kubenswrapper[4829]: E0122 00:07:20.550211 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:21.050192374 +0000 UTC m=+19.086434296 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.551127 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.551202 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.566691 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.577281 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.579070 4829 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.588844 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.598114 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.606768 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.613784 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.623025 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.627450 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.627694 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.627824 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.628213 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.628651 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.629554 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.629849 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 
00:07:20.629938 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.630020 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.630115 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.630200 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.630286 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.630370 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.630452 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.630563 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.630656 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.631022 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 
00:07:20.631131 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.631226 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.631327 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.631419 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.631802 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.631902 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.631991 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.632089 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.632179 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.632304 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 22 00:07:20 crc 
kubenswrapper[4829]: I0122 00:07:20.632714 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.632829 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.632926 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.633026 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.633120 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.633447 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.633565 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.633676 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.633767 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.633990 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") 
" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.634087 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.634176 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.634609 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.635672 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.635813 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.635919 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.636010 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.636665 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.636709 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.636733 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: 
\"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.636753 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.636771 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.636791 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.636810 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.636829 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.636847 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.636869 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.636891 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.636909 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.636929 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.636946 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.636962 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.636980 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.636996 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637014 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637033 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637050 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637076 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637095 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637117 4829 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637134 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637151 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637172 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637195 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637217 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637237 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637255 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637276 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637295 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637317 4829 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637335 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637352 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637371 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637390 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637409 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637427 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637445 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637462 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637478 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 22 00:07:20 crc kubenswrapper[4829]: 
I0122 00:07:20.637497 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637515 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637531 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637563 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637580 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637600 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637617 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637637 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637654 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637671 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: 
\"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637689 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637705 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637722 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637739 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637754 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637770 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637786 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637802 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637817 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637832 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod 
\"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637847 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637863 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637880 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637895 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637910 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637928 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637945 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637961 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.637978 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638067 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638086 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638103 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638123 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638139 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638158 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638173 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638189 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638218 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638234 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638249 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638265 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638283 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638297 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638313 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638332 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638348 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638363 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638380 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638401 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638420 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638437 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638454 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638470 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638486 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638503 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638521 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638556 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638583 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638605 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638625 4829 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638643 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638659 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638676 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638694 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638713 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638730 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638748 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638764 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638781 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 
00:07:20.638796 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638812 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638830 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638846 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638885 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638902 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638919 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638936 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638952 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638970 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.638993 4829 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639012 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639028 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639044 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639061 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639080 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639099 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639117 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639132 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639150 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639166 4829 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639182 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639198 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639215 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639231 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639248 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639268 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639292 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639312 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639333 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639353 4829 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639373 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639394 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639415 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639436 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639456 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639472 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639488 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639504 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639521 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 22 
00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639592 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639618 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639676 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639723 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639741 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.639765 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.627924 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.628042 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.628432 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.628599 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.629490 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.629810 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.630752 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.630911 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.631056 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.631205 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.631363 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.631533 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.632163 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.632330 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.632470 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.632644 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.632790 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.632929 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.633087 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.633245 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.633281 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.633392 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.634428 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.634588 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.634717 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.634942 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.635205 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.635245 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.635268 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.635278 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.635489 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.635582 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). 
InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.635914 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.636610 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.641663 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.641883 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.641905 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.642063 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.642085 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.642084 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.642153 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.642513 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.642559 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.642636 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.642733 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.642821 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.642831 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.642978 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.642920 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.643117 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.643185 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.643193 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.643245 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.643270 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.643367 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.643447 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.643580 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.643767 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.643795 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.643768 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.643829 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.644091 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.644579 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: E0122 00:07:20.644768 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:07:21.144745313 +0000 UTC m=+19.180987225 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.645060 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.645090 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.645327 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.645463 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.645886 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.645940 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.646211 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.646232 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.646396 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.646407 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.646455 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.646498 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.646677 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.646725 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.646945 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.647112 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.647182 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.647488 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.647828 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.647948 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.648013 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.648119 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.648180 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.648188 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.648418 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.648524 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.648756 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.648786 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.649044 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). 
InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.649049 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.649255 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.649260 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.649344 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.649385 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.649402 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.649476 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.649611 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). 
InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.649682 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.649743 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.649776 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.649972 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.650105 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.650367 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.650374 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.650490 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.650686 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.650715 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.650816 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.650939 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.650948 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.651034 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.651157 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.651321 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.651713 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.651764 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.652046 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.652070 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.652043 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.652337 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.652377 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.652395 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.652413 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.652704 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.652969 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.653106 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.653246 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.653347 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.653592 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.653951 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.653996 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.654012 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.654025 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.654038 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.654058 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:20Z","lastTransitionTime":"2026-01-22T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.654170 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.654233 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.654487 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.654537 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.654717 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.654828 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.654934 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.654964 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.655032 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). 
InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.655031 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.655212 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.655270 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.655345 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.655432 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.655311 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.655614 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.654981 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.655947 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.656191 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 00:07:20 crc kubenswrapper[4829]: E0122 00:07:20.656488 4829 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 00:07:20 crc kubenswrapper[4829]: E0122 00:07:20.656560 4829 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 00:07:20 crc kubenswrapper[4829]: E0122 00:07:20.656613 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:21.156598143 +0000 UTC m=+19.192840055 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 00:07:20 crc kubenswrapper[4829]: E0122 00:07:20.656651 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:21.156645264 +0000 UTC m=+19.192887176 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.656823 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.657007 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.657031 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.657133 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.656835 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.657325 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.657353 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.657428 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.657813 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.657989 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.658148 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.658388 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.658379 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.658401 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.658688 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.658789 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.659733 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.660060 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.660135 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.660610 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.660702 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.660771 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.660887 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.660910 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.661174 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.661269 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.661631 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.662972 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.663040 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.663870 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.664229 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). 
InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.664286 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.664655 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.665288 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.665373 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.665508 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.665597 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.665687 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.667827 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.669361 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.673304 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.678942 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.685397 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.687801 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.695129 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.696810 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.740207 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.740326 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.740673 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.740767 4829 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.740853 4829 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.740932 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.741009 4829 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.741087 4829 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.741163 4829 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.741239 4829 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.741320 4829 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.741396 4829 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 
00:07:20.741472 4829 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.741577 4829 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.741665 4829 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.741748 4829 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.741825 4829 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.741902 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.741979 4829 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.742057 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.742134 4829 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.742215 4829 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.742292 4829 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.742371 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.742448 4829 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 22 
00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.742525 4829 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.742666 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.742698 4829 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.742715 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.742735 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.742753 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.742769 4829 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.742785 4829 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.742803 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.742819 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.742835 4829 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.742850 4829 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.742896 4829 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 22 
00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.742913 4829 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.742929 4829 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.742945 4829 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.742962 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.742978 4829 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.742995 4829 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743012 4829 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743029 4829 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743049 4829 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743065 4829 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743083 4829 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743103 4829 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743119 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc 
kubenswrapper[4829]: I0122 00:07:20.743136 4829 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743152 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743168 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743185 4829 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743201 4829 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743217 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743234 4829 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743251 4829 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743267 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743287 4829 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743304 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743321 4829 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743340 4829 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node 
\"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743357 4829 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743374 4829 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743390 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743408 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743423 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743439 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743456 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743472 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743488 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743503 4829 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743519 4829 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743534 4829 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743574 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" 
DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743592 4829 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743607 4829 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743623 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743639 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743656 4829 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743672 4829 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743688 4829 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743705 4829 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743721 4829 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743739 4829 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743756 4829 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743772 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743788 4829 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 
22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743805 4829 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743824 4829 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743841 4829 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743857 4829 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743873 4829 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743888 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743905 4829 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743920 4829 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743936 4829 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743953 4829 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743970 4829 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.743987 4829 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744003 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc 
kubenswrapper[4829]: I0122 00:07:20.744057 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744077 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744094 4829 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744110 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744126 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744143 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744159 4829 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744175 4829 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744191 4829 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744208 4829 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744226 4829 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744244 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744260 4829 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc 
kubenswrapper[4829]: I0122 00:07:20.744279 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744297 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744314 4829 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744332 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744349 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744365 4829 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744382 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744399 4829 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744416 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744433 4829 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744449 4829 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744466 4829 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744483 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" 
Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744500 4829 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744516 4829 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744533 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744573 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744592 4829 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744609 4829 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744626 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744643 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744659 4829 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744675 4829 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744698 4829 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744716 4829 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744733 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node 
\"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744749 4829 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744765 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744782 4829 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744798 4829 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744814 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744830 4829 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744847 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744863 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744881 4829 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744908 4829 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744925 4829 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744941 4829 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744959 4829 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.744976 4829 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745126 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745167 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745183 4829 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745202 4829 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745219 4829 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745238 4829 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745256 4829 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745273 4829 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745292 4829 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745308 4829 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745325 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745342 4829 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745360 4829 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745379 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745396 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745414 4829 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745430 4829 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745446 4829 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745463 4829 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745480 4829 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745495 4829 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745513 4829 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745530 4829 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745570 4829 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745589 4829 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745605 4829 
reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745622 4829 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745638 4829 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745654 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745671 4829 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745687 4829 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745703 4829 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745720 4829 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745736 4829 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745752 4829 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745768 4829 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745785 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745803 4829 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745820 4829 reconciler_common.go:293] "Volume detached for volume 
\"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.745836 4829 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.756749 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.756779 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.756794 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.756816 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.756832 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:20Z","lastTransitionTime":"2026-01-22T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.800935 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.809622 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.817909 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.822055 4829 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:54648->192.168.126.11:17697: read: connection reset by peer" start-of-body= Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.822115 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:54648->192.168.126.11:17697: read: connection reset by peer" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.822631 4829 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.822653 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Jan 22 00:07:20 crc kubenswrapper[4829]: W0122 00:07:20.858694 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-91417994c3253c402aa5d234a8950ae5a35aa32d6b095ccdc1875a27dd1325bf WatchSource:0}: Error finding container 91417994c3253c402aa5d234a8950ae5a35aa32d6b095ccdc1875a27dd1325bf: Status 404 returned error can't find the container with id 91417994c3253c402aa5d234a8950ae5a35aa32d6b095ccdc1875a27dd1325bf Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.859430 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.859511 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.859592 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.859728 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.859811 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:20Z","lastTransitionTime":"2026-01-22T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.900093 4829 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.900464 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.962632 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.962675 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.962684 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.962699 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:20 crc kubenswrapper[4829]: I0122 00:07:20.962708 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:20Z","lastTransitionTime":"2026-01-22T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.064978 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.065041 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.065059 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.065085 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.065102 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:21Z","lastTransitionTime":"2026-01-22T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.149942 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.150013 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.150057 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:07:21 crc kubenswrapper[4829]: E0122 00:07:21.150136 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:07:22.150115727 +0000 UTC m=+20.186357649 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:07:21 crc kubenswrapper[4829]: E0122 00:07:21.150164 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 00:07:21 crc kubenswrapper[4829]: E0122 00:07:21.150179 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 00:07:21 crc kubenswrapper[4829]: E0122 00:07:21.150190 4829 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:21 crc kubenswrapper[4829]: E0122 00:07:21.150260 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:22.150249441 +0000 UTC m=+20.186491353 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:21 crc kubenswrapper[4829]: E0122 00:07:21.150295 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 00:07:21 crc kubenswrapper[4829]: E0122 00:07:21.150347 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 00:07:21 crc kubenswrapper[4829]: E0122 00:07:21.150366 4829 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:21 crc kubenswrapper[4829]: E0122 00:07:21.150452 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:22.150427386 +0000 UTC m=+20.186669358 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.167595 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.167641 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.167653 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.167669 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.167681 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:21Z","lastTransitionTime":"2026-01-22T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.251382 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.251418 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:21 crc kubenswrapper[4829]: E0122 00:07:21.251519 4829 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 00:07:21 crc kubenswrapper[4829]: E0122 00:07:21.251594 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:22.251581786 +0000 UTC m=+20.287823698 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 00:07:21 crc kubenswrapper[4829]: E0122 00:07:21.251651 4829 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 00:07:21 crc kubenswrapper[4829]: E0122 00:07:21.251795 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:22.251758531 +0000 UTC m=+20.288000483 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.270291 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.270339 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.270353 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.270372 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.270384 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:21Z","lastTransitionTime":"2026-01-22T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.372935 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.373007 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.373026 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.373053 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.373070 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:21Z","lastTransitionTime":"2026-01-22T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.475360 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.475405 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.475417 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.475435 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.475447 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:21Z","lastTransitionTime":"2026-01-22T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.492085 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 19:44:56.850606172 +0000 UTC Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.578205 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.578249 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.578257 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.578272 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.578282 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:21Z","lastTransitionTime":"2026-01-22T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.680367 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.680420 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.680433 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.680451 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.680462 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:21Z","lastTransitionTime":"2026-01-22T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.680787 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1"} Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.680870 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca"} Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.680906 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"91417994c3253c402aa5d234a8950ae5a35aa32d6b095ccdc1875a27dd1325bf"} Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.682272 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"54f7a6a69f7ebdbc8a9541e73892a0396dd687c0b8c935744634d2bcb958c878"} Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.683817 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7"} Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.683857 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"da7a3408910b11ae80ec01bf237a98b6fab969d0bdf762c90d1ba4b438c60516"} Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.685173 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.687566 4829 generic.go:334] 
"Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2" exitCode=255 Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.687581 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2"} Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.699160 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.708366 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.708379 4829 scope.go:117] "RemoveContainer" containerID="24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.715904 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.735428 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.754084 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.770878 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.782571 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.782638 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.782678 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.782692 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.782709 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.782722 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:21Z","lastTransitionTime":"2026-01-22T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.798568 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.813877 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.828678 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.850338 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22
T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.862725 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.878851 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.885211 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.885250 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.885263 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.885280 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.885293 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:21Z","lastTransitionTime":"2026-01-22T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.894066 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.988301 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.988350 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.988360 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.988376 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:21 crc kubenswrapper[4829]: I0122 00:07:21.988385 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:21Z","lastTransitionTime":"2026-01-22T00:07:21Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.093013 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.093055 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.093065 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.093081 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.093094 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:22Z","lastTransitionTime":"2026-01-22T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.159731 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.159811 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.159859 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:07:22 crc kubenswrapper[4829]: E0122 00:07:22.159978 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 00:07:22 crc kubenswrapper[4829]: E0122 00:07:22.159998 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 00:07:22 crc kubenswrapper[4829]: E0122 00:07:22.160011 4829 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:22 crc kubenswrapper[4829]: E0122 
00:07:22.160078 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:24.160057882 +0000 UTC m=+22.196299814 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:22 crc kubenswrapper[4829]: E0122 00:07:22.160110 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 00:07:22 crc kubenswrapper[4829]: E0122 00:07:22.160145 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 00:07:22 crc kubenswrapper[4829]: E0122 00:07:22.160160 4829 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:22 crc kubenswrapper[4829]: E0122 00:07:22.160160 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:07:24.160116354 +0000 UTC m=+22.196358266 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:07:22 crc kubenswrapper[4829]: E0122 00:07:22.160232 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:24.160210206 +0000 UTC m=+22.196452118 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.194626 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.194658 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.194666 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.194680 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.194689 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:22Z","lastTransitionTime":"2026-01-22T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.261181 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.261218 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:22 crc kubenswrapper[4829]: E0122 00:07:22.261314 4829 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 00:07:22 crc kubenswrapper[4829]: E0122 00:07:22.261387 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:24.261370625 +0000 UTC m=+22.297612537 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 00:07:22 crc kubenswrapper[4829]: E0122 00:07:22.261325 4829 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 00:07:22 crc kubenswrapper[4829]: E0122 00:07:22.261448 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:24.261436187 +0000 UTC m=+22.297678099 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.296732 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.296786 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.296805 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.296828 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.296848 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:22Z","lastTransitionTime":"2026-01-22T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.399291 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.399347 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.399361 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.399380 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.399392 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:22Z","lastTransitionTime":"2026-01-22T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.440132 4829 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.492615 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 07:42:00.248560993 +0000 UTC Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.502573 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.502615 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.502627 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.502646 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.502660 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:22Z","lastTransitionTime":"2026-01-22T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.553315 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.553352 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.553587 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:07:22 crc kubenswrapper[4829]: E0122 00:07:22.553536 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:07:22 crc kubenswrapper[4829]: E0122 00:07:22.553682 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:07:22 crc kubenswrapper[4829]: E0122 00:07:22.553776 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.559741 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.561244 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.564079 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.565134 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.566398 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.567114 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.567715 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.569118 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.569438 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.570206 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.571595 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.572268 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.573444 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.574179 4829 kubelet_volumes.go:163] "Cleaned up orphaned 
pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.574831 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.575985 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.576624 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.577878 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.578332 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.579067 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.580271 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.580848 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.582038 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.582613 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.584259 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.584484 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.585014 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" 
path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.585757 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.587077 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.587741 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.588925 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.589380 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.590245 4829 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.590344 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.591991 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.592951 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.593334 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.594773 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.595408 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.596282 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.596988 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" 
path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.597956 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.598742 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.599664 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.600212 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.600376 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.601439 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.601879 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.603398 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.603877 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.605116 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.605391 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.605439 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.605451 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.605469 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.605486 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:22Z","lastTransitionTime":"2026-01-22T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.605754 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.606535 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.607005 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.608229 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.608992 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.609878 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.650495 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.674185 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22
T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.687291 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.691998 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.693314 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3"} Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.693977 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.701953 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.707694 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.707729 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.707741 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.707757 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.707770 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:22Z","lastTransitionTime":"2026-01-22T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.713899 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.726239 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.744956 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.759522 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.772816 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.784173 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.797910 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.809204 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.809239 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.809253 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.809269 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.809281 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:22Z","lastTransitionTime":"2026-01-22T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.911703 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.911778 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.911797 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.911823 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:22 crc kubenswrapper[4829]: I0122 00:07:22.911841 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:22Z","lastTransitionTime":"2026-01-22T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.014796 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.014847 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.014862 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.014884 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.014901 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:23Z","lastTransitionTime":"2026-01-22T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.023148 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.042513 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.043742 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.051343 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.069607 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.084680 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.102490 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\
"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 
genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.117232 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.117284 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.117294 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.117308 4829 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.117317 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:23Z","lastTransitionTime":"2026-01-22T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.120491 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.136695 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.155302 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.177277 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.198347 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.220702 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.220760 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.220773 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.220795 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.220808 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:23Z","lastTransitionTime":"2026-01-22T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.222581 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.243192 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.262639 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.278198 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.291961 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.324908 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.324985 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.325016 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.325050 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.325076 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:23Z","lastTransitionTime":"2026-01-22T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.325364 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.431899 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.431962 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.432013 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.432040 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.432065 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:23Z","lastTransitionTime":"2026-01-22T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.493593 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 23:26:48.242723479 +0000 UTC Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.535108 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.535156 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.535174 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.535198 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.535217 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:23Z","lastTransitionTime":"2026-01-22T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.637423 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.637461 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.637470 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.637485 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.637494 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:23Z","lastTransitionTime":"2026-01-22T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.697585 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a"} Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.713840 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.726247 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.738601 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.740786 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.740822 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.740837 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.740862 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.740880 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:23Z","lastTransitionTime":"2026-01-22T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.754231 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.773261 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37
932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.785056 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.797131 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.808696 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.843186 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.843231 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.843244 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.843264 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.843275 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:23Z","lastTransitionTime":"2026-01-22T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.945997 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.946063 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.946080 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.946104 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:23 crc kubenswrapper[4829]: I0122 00:07:23.946119 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:23Z","lastTransitionTime":"2026-01-22T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.049179 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.049217 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.049230 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.049249 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.049260 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:24Z","lastTransitionTime":"2026-01-22T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.153438 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.153496 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.153515 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.153574 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.153593 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:24Z","lastTransitionTime":"2026-01-22T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.179689 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.179767 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.179836 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:07:24 crc kubenswrapper[4829]: E0122 00:07:24.179965 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:07:28.17994426 +0000 UTC m=+26.216186182 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:07:24 crc kubenswrapper[4829]: E0122 00:07:24.180058 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 00:07:24 crc kubenswrapper[4829]: E0122 00:07:24.180104 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 00:07:24 crc kubenswrapper[4829]: E0122 00:07:24.180107 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 00:07:24 crc kubenswrapper[4829]: E0122 00:07:24.180125 4829 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:24 crc kubenswrapper[4829]: E0122 00:07:24.180146 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 00:07:24 crc kubenswrapper[4829]: E0122 00:07:24.180162 4829 projected.go:194] Error 
preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:24 crc kubenswrapper[4829]: E0122 00:07:24.180244 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:28.180226159 +0000 UTC m=+26.216468131 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:24 crc kubenswrapper[4829]: E0122 00:07:24.180266 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:28.18025898 +0000 UTC m=+26.216500992 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.256302 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.256342 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.256351 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.256367 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.256381 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:24Z","lastTransitionTime":"2026-01-22T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.280717 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.280748 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:24 crc kubenswrapper[4829]: E0122 00:07:24.280847 4829 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 00:07:24 crc kubenswrapper[4829]: E0122 00:07:24.280903 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:28.280887023 +0000 UTC m=+26.317128935 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 00:07:24 crc kubenswrapper[4829]: E0122 00:07:24.280928 4829 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 00:07:24 crc kubenswrapper[4829]: E0122 00:07:24.281016 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:28.280991166 +0000 UTC m=+26.317233118 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.359036 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.359139 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.359163 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.359196 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.359220 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:24Z","lastTransitionTime":"2026-01-22T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.462061 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.462322 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.462465 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.462596 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.462722 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:24Z","lastTransitionTime":"2026-01-22T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.484911 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.491636 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.493988 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 20:00:39.241308834 +0000 UTC Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.496104 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.506043 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.521417 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.537017 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.552696 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.552728 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.552829 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.552797 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:24 crc kubenswrapper[4829]: E0122 00:07:24.552962 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:07:24 crc kubenswrapper[4829]: E0122 00:07:24.553172 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:07:24 crc kubenswrapper[4829]: E0122 00:07:24.553284 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.564458 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.564509 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.564525 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.564566 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.564583 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:24Z","lastTransitionTime":"2026-01-22T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.569819 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.585079 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.604992 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.631964 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\
\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215
e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.649111 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.667310 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.667511 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.667686 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.667821 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.667902 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:24Z","lastTransitionTime":"2026-01-22T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.672299 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.690293 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.708333 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.725408 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.757599 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37
932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.769998 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.770039 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.770051 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.770070 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.770094 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:24Z","lastTransitionTime":"2026-01-22T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.778424 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.796995 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.809998 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.872811 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.872862 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.872873 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.872892 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.872906 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:24Z","lastTransitionTime":"2026-01-22T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.974802 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.974841 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.974854 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.974871 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:24 crc kubenswrapper[4829]: I0122 00:07:24.974882 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:24Z","lastTransitionTime":"2026-01-22T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.077232 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.077292 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.077311 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.077336 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.077355 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:25Z","lastTransitionTime":"2026-01-22T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.180044 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.180420 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.180659 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.180877 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.181067 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:25Z","lastTransitionTime":"2026-01-22T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.277402 4829 csr.go:261] certificate signing request csr-t2hlg is approved, waiting to be issued Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.283889 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.284212 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.284356 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.284492 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.284645 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:25Z","lastTransitionTime":"2026-01-22T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.297030 4829 csr.go:257] certificate signing request csr-t2hlg is issued Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.387198 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.387240 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.387250 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.387268 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.387279 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:25Z","lastTransitionTime":"2026-01-22T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.488906 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.488945 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.488955 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.488971 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.488982 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:25Z","lastTransitionTime":"2026-01-22T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.495189 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 14:08:03.645991131 +0000 UTC Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.591411 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.591466 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.591483 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.591509 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.591526 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:25Z","lastTransitionTime":"2026-01-22T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.693532 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.693610 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.693630 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.693655 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.693672 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:25Z","lastTransitionTime":"2026-01-22T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.710870 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-vgv2h"] Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.711187 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-vgv2h" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.711483 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-x4jcr"] Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.712043 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.713294 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.717338 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.718520 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.718777 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.719081 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.719182 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.719328 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.720897 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.734050 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.760404 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.785176 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.794526 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc-mcd-auth-proxy-config\") pod \"machine-config-daemon-x4jcr\" (UID: \"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\") " pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.794589 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc-rootfs\") pod \"machine-config-daemon-x4jcr\" (UID: \"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\") " pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.794614 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/8d27a46b-130f-4497-af81-45ea63a50632-hosts-file\") pod \"node-resolver-vgv2h\" (UID: \"8d27a46b-130f-4497-af81-45ea63a50632\") " pod="openshift-dns/node-resolver-vgv2h" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.794635 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmf49\" (UniqueName: \"kubernetes.io/projected/f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc-kube-api-access-xmf49\") pod \"machine-config-daemon-x4jcr\" (UID: \"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\") " pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.794656 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5bvf\" (UniqueName: \"kubernetes.io/projected/8d27a46b-130f-4497-af81-45ea63a50632-kube-api-access-q5bvf\") pod \"node-resolver-vgv2h\" (UID: \"8d27a46b-130f-4497-af81-45ea63a50632\") " pod="openshift-dns/node-resolver-vgv2h" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.794678 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc-proxy-tls\") pod \"machine-config-daemon-x4jcr\" (UID: \"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\") " pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.796231 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.796275 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:25 
crc kubenswrapper[4829]: I0122 00:07:25.796292 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.796317 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.796331 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:25Z","lastTransitionTime":"2026-01-22T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.801834 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.830296 4829 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":
\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"cont
ainerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.846284 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.862884 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.876071 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.890429 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.895699 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc-mcd-auth-proxy-config\") pod \"machine-config-daemon-x4jcr\" (UID: \"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\") " pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.895741 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc-rootfs\") pod \"machine-config-daemon-x4jcr\" (UID: \"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\") " pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 00:07:25 crc 
kubenswrapper[4829]: I0122 00:07:25.895768 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/8d27a46b-130f-4497-af81-45ea63a50632-hosts-file\") pod \"node-resolver-vgv2h\" (UID: \"8d27a46b-130f-4497-af81-45ea63a50632\") " pod="openshift-dns/node-resolver-vgv2h" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.895790 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmf49\" (UniqueName: \"kubernetes.io/projected/f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc-kube-api-access-xmf49\") pod \"machine-config-daemon-x4jcr\" (UID: \"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\") " pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.895809 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc-proxy-tls\") pod \"machine-config-daemon-x4jcr\" (UID: \"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\") " pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.895832 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5bvf\" (UniqueName: \"kubernetes.io/projected/8d27a46b-130f-4497-af81-45ea63a50632-kube-api-access-q5bvf\") pod \"node-resolver-vgv2h\" (UID: \"8d27a46b-130f-4497-af81-45ea63a50632\") " pod="openshift-dns/node-resolver-vgv2h" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.895963 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/8d27a46b-130f-4497-af81-45ea63a50632-hosts-file\") pod \"node-resolver-vgv2h\" (UID: \"8d27a46b-130f-4497-af81-45ea63a50632\") " pod="openshift-dns/node-resolver-vgv2h" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.896401 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc-rootfs\") pod \"machine-config-daemon-x4jcr\" (UID: \"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\") " pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.896807 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc-mcd-auth-proxy-config\") pod \"machine-config-daemon-x4jcr\" (UID: \"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\") " pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.898051 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.898318 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.898332 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.898349 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.898362 4829 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:25Z","lastTransitionTime":"2026-01-22T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.901799 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc-proxy-tls\") pod \"machine-config-daemon-x4jcr\" (UID: \"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\") " pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.906281 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c766
4883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.924100 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.925290 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5bvf\" (UniqueName: \"kubernetes.io/projected/8d27a46b-130f-4497-af81-45ea63a50632-kube-api-access-q5bvf\") pod \"node-resolver-vgv2h\" (UID: \"8d27a46b-130f-4497-af81-45ea63a50632\") " pod="openshift-dns/node-resolver-vgv2h" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.930493 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmf49\" (UniqueName: \"kubernetes.io/projected/f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc-kube-api-access-xmf49\") pod \"machine-config-daemon-x4jcr\" (UID: \"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\") " pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.945750 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.969097 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:25 crc kubenswrapper[4829]: I0122 00:07:25.982086 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.001141 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.001172 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.001182 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.001196 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.001204 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:26Z","lastTransitionTime":"2026-01-22T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.013241 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.028923 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-vgv2h" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.032897 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 00:07:26 crc kubenswrapper[4829]: W0122 00:07:26.050665 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8d27a46b_130f_4497_af81_45ea63a50632.slice/crio-4bb4c0cc77a1a415b1c562b383de8f478a7720f3a8561bcbb16066d0dc8e395c WatchSource:0}: Error finding container 4bb4c0cc77a1a415b1c562b383de8f478a7720f3a8561bcbb16066d0dc8e395c: Status 404 returned error can't find the container with id 4bb4c0cc77a1a415b1c562b383de8f478a7720f3a8561bcbb16066d0dc8e395c Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.071970 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri
-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057f
d80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.094692 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.108793 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.112683 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.112718 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.112726 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.112742 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.112753 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:26Z","lastTransitionTime":"2026-01-22T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.115965 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-4ss4n"] Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.116261 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.120372 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.120475 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.120630 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.120732 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-fd6j8"] Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.120767 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.120851 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.126624 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.127310 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-v62gj"] Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.127988 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa
41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.129881 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.130086 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.130146 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.130207 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.130317 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.130322 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.130492 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.130898 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-v62gj" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.134145 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.134352 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.141576 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.166705 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.197476 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-system-cni-dir\") pod \"multus-4ss4n\" 
(UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.197523 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/60f879f6-8b21-4e75-9a62-d372fec048e1-multus-daemon-config\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.197565 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-host-run-k8s-cni-cncf-io\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.197585 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-host-run-multus-certs\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.197608 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-multus-socket-dir-parent\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.197621 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-host-var-lib-cni-bin\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.197634 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kq6h\" (UniqueName: \"kubernetes.io/projected/60f879f6-8b21-4e75-9a62-d372fec048e1-kube-api-access-6kq6h\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.197653 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-multus-conf-dir\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.197669 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-cnibin\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.197686 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-host-run-netns\") pod \"multus-4ss4n\" (UID: 
\"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.197701 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-host-var-lib-kubelet\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.197717 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-host-var-lib-cni-multus\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.197732 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-multus-cni-dir\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.197749 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/60f879f6-8b21-4e75-9a62-d372fec048e1-cni-binary-copy\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.197761 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-hostroot\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.197776 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-etc-kubernetes\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.197799 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-os-release\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.198343 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.213217 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.214522 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.214562 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.214571 4829 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.214585 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.214594 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:26Z","lastTransitionTime":"2026-01-22T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.233994 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\
"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"l
og-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.247515 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.262977 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.277254 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is 
after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.293454 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.297881 4829 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-01-22 00:02:25 +0000 UTC, rotation deadline is 2026-10-11 01:44:48.88204716 +0000 UTC Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.297928 4829 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 6289h37m22.584121244s for next certificate rotation Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298054 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-run-ovn\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298087 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-host-var-lib-kubelet\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 
00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298102 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/60f879f6-8b21-4e75-9a62-d372fec048e1-cni-binary-copy\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298117 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-etc-kubernetes\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298135 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298151 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7zv6\" (UniqueName: \"kubernetes.io/projected/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-kube-api-access-z7zv6\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298167 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/257dfafb-8d80-4de2-97e5-96df6b004a43-os-release\") pod \"multus-additional-cni-plugins-v62gj\" (UID: \"257dfafb-8d80-4de2-97e5-96df6b004a43\") " pod="openshift-multus/multus-additional-cni-plugins-v62gj" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298206 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-system-cni-dir\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298221 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-ovnkube-script-lib\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298237 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmcbx\" (UniqueName: \"kubernetes.io/projected/257dfafb-8d80-4de2-97e5-96df6b004a43-kube-api-access-nmcbx\") pod \"multus-additional-cni-plugins-v62gj\" (UID: \"257dfafb-8d80-4de2-97e5-96df6b004a43\") " pod="openshift-multus/multus-additional-cni-plugins-v62gj" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298251 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-run-openvswitch\") pod \"ovnkube-node-fd6j8\" (UID: 
\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298269 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/257dfafb-8d80-4de2-97e5-96df6b004a43-system-cni-dir\") pod \"multus-additional-cni-plugins-v62gj\" (UID: \"257dfafb-8d80-4de2-97e5-96df6b004a43\") " pod="openshift-multus/multus-additional-cni-plugins-v62gj" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298284 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-host-run-k8s-cni-cncf-io\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298301 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-host-run-multus-certs\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298315 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-kubelet\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298330 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-run-systemd\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298346 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-multus-socket-dir-parent\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298360 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-var-lib-openvswitch\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298388 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-env-overrides\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298403 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: 
\"kubernetes.io/host-path/257dfafb-8d80-4de2-97e5-96df6b004a43-tuning-conf-dir\") pod \"multus-additional-cni-plugins-v62gj\" (UID: \"257dfafb-8d80-4de2-97e5-96df6b004a43\") " pod="openshift-multus/multus-additional-cni-plugins-v62gj" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298417 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/257dfafb-8d80-4de2-97e5-96df6b004a43-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-v62gj\" (UID: \"257dfafb-8d80-4de2-97e5-96df6b004a43\") " pod="openshift-multus/multus-additional-cni-plugins-v62gj" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298432 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-multus-conf-dir\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298448 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-run-netns\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298464 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-node-log\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298480 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-log-socket\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298494 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-ovnkube-config\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298512 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-host-run-netns\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298527 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-slash\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298557 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-host-var-lib-cni-multus\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298574 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-multus-cni-dir\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298589 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-hostroot\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298607 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-cni-bin\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298623 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-os-release\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298638 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/60f879f6-8b21-4e75-9a62-d372fec048e1-multus-daemon-config\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298653 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/257dfafb-8d80-4de2-97e5-96df6b004a43-cnibin\") pod \"multus-additional-cni-plugins-v62gj\" (UID: \"257dfafb-8d80-4de2-97e5-96df6b004a43\") " pod="openshift-multus/multus-additional-cni-plugins-v62gj" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298674 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-cni-netd\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298690 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kq6h\" (UniqueName: \"kubernetes.io/projected/60f879f6-8b21-4e75-9a62-d372fec048e1-kube-api-access-6kq6h\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298704 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-systemd-units\") pod 
\"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298719 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-etc-openvswitch\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298736 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-run-ovn-kubernetes\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298750 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/257dfafb-8d80-4de2-97e5-96df6b004a43-cni-binary-copy\") pod \"multus-additional-cni-plugins-v62gj\" (UID: \"257dfafb-8d80-4de2-97e5-96df6b004a43\") " pod="openshift-multus/multus-additional-cni-plugins-v62gj" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298766 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-host-var-lib-cni-bin\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298781 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-cnibin\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298794 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-ovn-node-metrics-cert\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.298865 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-host-var-lib-kubelet\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.299429 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/60f879f6-8b21-4e75-9a62-d372fec048e1-cni-binary-copy\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.299465 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-etc-kubernetes\") pod \"multus-4ss4n\" (UID: 
\"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.299604 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-system-cni-dir\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.299659 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-host-run-k8s-cni-cncf-io\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.299682 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-host-run-multus-certs\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.299723 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-multus-socket-dir-parent\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.299766 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-multus-conf-dir\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.299812 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-host-run-netns\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.299840 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-host-var-lib-cni-multus\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.299877 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-multus-cni-dir\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.299899 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-hostroot\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.299939 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: 
\"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-os-release\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.300130 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-host-var-lib-cni-bin\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.300194 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/60f879f6-8b21-4e75-9a62-d372fec048e1-cnibin\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.300344 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/60f879f6-8b21-4e75-9a62-d372fec048e1-multus-daemon-config\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.307576 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.318804 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kq6h\" (UniqueName: \"kubernetes.io/projected/60f879f6-8b21-4e75-9a62-d372fec048e1-kube-api-access-6kq6h\") pod \"multus-4ss4n\" (UID: \"60f879f6-8b21-4e75-9a62-d372fec048e1\") " pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.318927 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.318941 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.318949 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.318963 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.318973 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:26Z","lastTransitionTime":"2026-01-22T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.322168 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\
\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.341159 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.356685 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.370682 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},
\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.388478 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.399922 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/257dfafb-8d80-4de2-97e5-96df6b004a43-cnibin\") pod \"multus-additional-cni-plugins-v62gj\" (UID: \"257dfafb-8d80-4de2-97e5-96df6b004a43\") " pod="openshift-multus/multus-additional-cni-plugins-v62gj" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.399973 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-cni-netd\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.399990 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-systemd-units\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400006 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-etc-openvswitch\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400021 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-run-ovn-kubernetes\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400035 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/257dfafb-8d80-4de2-97e5-96df6b004a43-cni-binary-copy\") pod \"multus-additional-cni-plugins-v62gj\" (UID: \"257dfafb-8d80-4de2-97e5-96df6b004a43\") " pod="openshift-multus/multus-additional-cni-plugins-v62gj" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400053 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-ovn-node-metrics-cert\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400068 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-run-ovn\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400085 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400099 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7zv6\" (UniqueName: \"kubernetes.io/projected/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-kube-api-access-z7zv6\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400116 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/257dfafb-8d80-4de2-97e5-96df6b004a43-os-release\") pod \"multus-additional-cni-plugins-v62gj\" (UID: \"257dfafb-8d80-4de2-97e5-96df6b004a43\") " pod="openshift-multus/multus-additional-cni-plugins-v62gj" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400149 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-ovnkube-script-lib\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400164 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmcbx\" (UniqueName: \"kubernetes.io/projected/257dfafb-8d80-4de2-97e5-96df6b004a43-kube-api-access-nmcbx\") pod \"multus-additional-cni-plugins-v62gj\" (UID: \"257dfafb-8d80-4de2-97e5-96df6b004a43\") " pod="openshift-multus/multus-additional-cni-plugins-v62gj" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400181 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-run-openvswitch\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400199 4829 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/257dfafb-8d80-4de2-97e5-96df6b004a43-system-cni-dir\") pod \"multus-additional-cni-plugins-v62gj\" (UID: \"257dfafb-8d80-4de2-97e5-96df6b004a43\") " pod="openshift-multus/multus-additional-cni-plugins-v62gj" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400217 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-kubelet\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400234 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-run-systemd\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400253 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-var-lib-openvswitch\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400273 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-env-overrides\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400294 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/257dfafb-8d80-4de2-97e5-96df6b004a43-tuning-conf-dir\") pod \"multus-additional-cni-plugins-v62gj\" (UID: \"257dfafb-8d80-4de2-97e5-96df6b004a43\") " pod="openshift-multus/multus-additional-cni-plugins-v62gj" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400318 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/257dfafb-8d80-4de2-97e5-96df6b004a43-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-v62gj\" (UID: \"257dfafb-8d80-4de2-97e5-96df6b004a43\") " pod="openshift-multus/multus-additional-cni-plugins-v62gj" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400334 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-run-netns\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400348 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-node-log\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400361 4829 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-log-socket\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400376 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-ovnkube-config\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400391 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-slash\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400410 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-cni-bin\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400475 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-cni-bin\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400520 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/257dfafb-8d80-4de2-97e5-96df6b004a43-cnibin\") pod \"multus-additional-cni-plugins-v62gj\" (UID: \"257dfafb-8d80-4de2-97e5-96df6b004a43\") " pod="openshift-multus/multus-additional-cni-plugins-v62gj" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400573 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-cni-netd\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400606 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-systemd-units\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400635 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-etc-openvswitch\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.400663 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-run-ovn-kubernetes\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.401275 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/257dfafb-8d80-4de2-97e5-96df6b004a43-cni-binary-copy\") pod \"multus-additional-cni-plugins-v62gj\" (UID: \"257dfafb-8d80-4de2-97e5-96df6b004a43\") " pod="openshift-multus/multus-additional-cni-plugins-v62gj" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.401802 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-log-socket\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.401819 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.401803 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-run-netns\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.401872 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-run-ovn\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.401903 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-node-log\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.401930 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-slash\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.401962 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-var-lib-openvswitch\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.401966 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-run-openvswitch\") pod \"ovnkube-node-fd6j8\" 
(UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.402044 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/257dfafb-8d80-4de2-97e5-96df6b004a43-system-cni-dir\") pod \"multus-additional-cni-plugins-v62gj\" (UID: \"257dfafb-8d80-4de2-97e5-96df6b004a43\") " pod="openshift-multus/multus-additional-cni-plugins-v62gj" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.402104 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/257dfafb-8d80-4de2-97e5-96df6b004a43-os-release\") pod \"multus-additional-cni-plugins-v62gj\" (UID: \"257dfafb-8d80-4de2-97e5-96df6b004a43\") " pod="openshift-multus/multus-additional-cni-plugins-v62gj" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.402128 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-kubelet\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.402180 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-ovnkube-script-lib\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.402230 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-run-systemd\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.402397 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-ovnkube-config\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.402736 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-env-overrides\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.402776 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/257dfafb-8d80-4de2-97e5-96df6b004a43-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-v62gj\" (UID: \"257dfafb-8d80-4de2-97e5-96df6b004a43\") " pod="openshift-multus/multus-additional-cni-plugins-v62gj" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.402863 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/257dfafb-8d80-4de2-97e5-96df6b004a43-tuning-conf-dir\") pod \"multus-additional-cni-plugins-v62gj\" (UID: \"257dfafb-8d80-4de2-97e5-96df6b004a43\") " 
pod="openshift-multus/multus-additional-cni-plugins-v62gj" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.404953 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-ovn-node-metrics-cert\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.420586 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.420620 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.420629 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.420644 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.420653 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:26Z","lastTransitionTime":"2026-01-22T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.424085 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7zv6\" (UniqueName: \"kubernetes.io/projected/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-kube-api-access-z7zv6\") pod \"ovnkube-node-fd6j8\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.424805 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.438253 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmcbx\" (UniqueName: \"kubernetes.io/projected/257dfafb-8d80-4de2-97e5-96df6b004a43-kube-api-access-nmcbx\") pod \"multus-additional-cni-plugins-v62gj\" (UID: \"257dfafb-8d80-4de2-97e5-96df6b004a43\") " pod="openshift-multus/multus-additional-cni-plugins-v62gj" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.442719 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-4ss4n" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.459856 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.461719 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-v62gj" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.496733 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 21:47:25.353198395 +0000 UTC Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.523395 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.523424 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.523434 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.523449 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.523459 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:26Z","lastTransitionTime":"2026-01-22T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.553257 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.553292 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.553267 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:07:26 crc kubenswrapper[4829]: E0122 00:07:26.553400 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:07:26 crc kubenswrapper[4829]: E0122 00:07:26.553493 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:07:26 crc kubenswrapper[4829]: E0122 00:07:26.553588 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.627853 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.627941 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.627977 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.627999 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.628011 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:26Z","lastTransitionTime":"2026-01-22T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.706715 4829 generic.go:334] "Generic (PLEG): container finished" podID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerID="b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579" exitCode=0 Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.706771 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" event={"ID":"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340","Type":"ContainerDied","Data":"b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579"} Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.706795 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" event={"ID":"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340","Type":"ContainerStarted","Data":"711074ca5051de154268f10698a9c0a6ab129910a1ec19030c4c0dd967a58a80"} Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.709680 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerStarted","Data":"a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b"} Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.709730 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerStarted","Data":"94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e"} Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.709744 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerStarted","Data":"f4e0b94b0d82e3ece3aacb1be255a92f7eec360ea8fdf62b0e204c8f515b1f43"} Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.711240 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-vgv2h" event={"ID":"8d27a46b-130f-4497-af81-45ea63a50632","Type":"ContainerStarted","Data":"22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd"} Jan 22 00:07:26 crc 
kubenswrapper[4829]: I0122 00:07:26.711289 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-vgv2h" event={"ID":"8d27a46b-130f-4497-af81-45ea63a50632","Type":"ContainerStarted","Data":"4bb4c0cc77a1a415b1c562b383de8f478a7720f3a8561bcbb16066d0dc8e395c"} Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.712410 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4ss4n" event={"ID":"60f879f6-8b21-4e75-9a62-d372fec048e1","Type":"ContainerStarted","Data":"4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f"} Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.712442 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4ss4n" event={"ID":"60f879f6-8b21-4e75-9a62-d372fec048e1","Type":"ContainerStarted","Data":"663ce4d436f51962c31562c1f62cbd14d87471e65a24b1661eefff2350d13453"} Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.719462 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" event={"ID":"257dfafb-8d80-4de2-97e5-96df6b004a43","Type":"ContainerStarted","Data":"a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce"} Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.719485 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" event={"ID":"257dfafb-8d80-4de2-97e5-96df6b004a43","Type":"ContainerStarted","Data":"6e6cedfc83a295ae015e08673ac8ab926ef53eae719760c46f85efc616355112"} Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.727556 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.729989 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.730025 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.730036 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.730052 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.730063 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:26Z","lastTransitionTime":"2026-01-22T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.739108 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.753219 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.767029 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.777648 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.790743 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.803703 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.824706 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.835830 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.835861 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.835870 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.835885 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.835895 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:26Z","lastTransitionTime":"2026-01-22T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.843462 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z 
is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.855184 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,
\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\
":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.874114 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,
\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.886626 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.897599 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.907233 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is 
after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.918513 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.931442 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.939055 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.939099 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.939113 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.939130 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.939141 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:26Z","lastTransitionTime":"2026-01-22T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.942368 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.956188 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.967926 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:26 crc kubenswrapper[4829]: I0122 00:07:26.986497 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apis
erver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.001889 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.015174 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.026084 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.041534 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.041573 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.041581 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.041594 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.041602 4829 
setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:27Z","lastTransitionTime":"2026-01-22T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.044435 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z 
is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.081934 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"
recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.131407 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a6731473
1ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.144050 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.144721 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.144761 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.144772 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.144790 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.144800 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:27Z","lastTransitionTime":"2026-01-22T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.155579 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.247806 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.247840 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.247852 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.247870 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.247883 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:27Z","lastTransitionTime":"2026-01-22T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration 
file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.350293 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.350318 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.350327 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.350341 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.350349 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:27Z","lastTransitionTime":"2026-01-22T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.453330 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.453396 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.453414 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.453440 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.453457 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:27Z","lastTransitionTime":"2026-01-22T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.496875 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 13:55:43.149435863 +0000 UTC Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.557038 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.557073 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.557088 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.557106 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.557121 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:27Z","lastTransitionTime":"2026-01-22T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.610279 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-knbr4"] Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.610692 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-knbr4" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.617132 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.617437 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.617568 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.618031 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.627557 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.637695 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.651355 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\
\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.659448 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.659477 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.659486 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.659704 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.659717 4829 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:27Z","lastTransitionTime":"2026-01-22T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.664031 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.680358 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.691063 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-22T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.718657 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/745ab832-48a6-4ce1-988c-30153d4ef1d8-host\") pod \"node-ca-knbr4\" (UID: \"745ab832-48a6-4ce1-988c-30153d4ef1d8\") " pod="openshift-image-registry/node-ca-knbr4" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.718717 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smj89\" (UniqueName: \"kubernetes.io/projected/745ab832-48a6-4ce1-988c-30153d4ef1d8-kube-api-access-smj89\") pod \"node-ca-knbr4\" (UID: \"745ab832-48a6-4ce1-988c-30153d4ef1d8\") " pod="openshift-image-registry/node-ca-knbr4" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.718785 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/745ab832-48a6-4ce1-988c-30153d4ef1d8-serviceca\") pod \"node-ca-knbr4\" (UID: \"745ab832-48a6-4ce1-988c-30153d4ef1d8\") " pod="openshift-image-registry/node-ca-knbr4" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.721219 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z 
is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.727066 4829 generic.go:334] "Generic (PLEG): container finished" podID="257dfafb-8d80-4de2-97e5-96df6b004a43" containerID="a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce" exitCode=0 Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.727144 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" event={"ID":"257dfafb-8d80-4de2-97e5-96df6b004a43","Type":"ContainerDied","Data":"a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce"} Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.729684 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" event={"ID":"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340","Type":"ContainerStarted","Data":"a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e"} Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.729733 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" event={"ID":"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340","Type":"ContainerStarted","Data":"b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990"} Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.746897 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.763435 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 
00:07:27.763761 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.763771 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.763786 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.763795 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:27Z","lastTransitionTime":"2026-01-22T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.764071 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.791836 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\
\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79a
acd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.806637 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.820271 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.820418 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: 
\"kubernetes.io/configmap/745ab832-48a6-4ce1-988c-30153d4ef1d8-serviceca\") pod \"node-ca-knbr4\" (UID: \"745ab832-48a6-4ce1-988c-30153d4ef1d8\") " pod="openshift-image-registry/node-ca-knbr4" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.820444 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/745ab832-48a6-4ce1-988c-30153d4ef1d8-host\") pod \"node-ca-knbr4\" (UID: \"745ab832-48a6-4ce1-988c-30153d4ef1d8\") " pod="openshift-image-registry/node-ca-knbr4" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.820470 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smj89\" (UniqueName: \"kubernetes.io/projected/745ab832-48a6-4ce1-988c-30153d4ef1d8-kube-api-access-smj89\") pod \"node-ca-knbr4\" (UID: \"745ab832-48a6-4ce1-988c-30153d4ef1d8\") " pod="openshift-image-registry/node-ca-knbr4" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.820580 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/745ab832-48a6-4ce1-988c-30153d4ef1d8-host\") pod \"node-ca-knbr4\" (UID: \"745ab832-48a6-4ce1-988c-30153d4ef1d8\") " pod="openshift-image-registry/node-ca-knbr4" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.821704 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/745ab832-48a6-4ce1-988c-30153d4ef1d8-serviceca\") pod \"node-ca-knbr4\" (UID: \"745ab832-48a6-4ce1-988c-30153d4ef1d8\") " pod="openshift-image-registry/node-ca-knbr4" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.832523 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.839464 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smj89\" (UniqueName: \"kubernetes.io/projected/745ab832-48a6-4ce1-988c-30153d4ef1d8-kube-api-access-smj89\") pod \"node-ca-knbr4\" (UID: \"745ab832-48a6-4ce1-988c-30153d4ef1d8\") " pod="openshift-image-registry/node-ca-knbr4" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.843097 4829 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z is after 
2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.856308 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"pod
IPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.865497 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.865524 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.865532 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.865560 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.865569 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:27Z","lastTransitionTime":"2026-01-22T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.886347 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.902996 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.916379 4829 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.942366 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc27
6e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.954557 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.961119 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-knbr4" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.967224 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.967897 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.967935 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.967947 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.967963 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.967974 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:27Z","lastTransitionTime":"2026-01-22T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:27 crc kubenswrapper[4829]: I0122 00:07:27.991571 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o:/
/34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84
b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.005658 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.029331 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.059480 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.069585 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.069613 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.069620 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.069634 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.069643 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:28Z","lastTransitionTime":"2026-01-22T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.104302 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:28Z 
is after 2025-08-24T17:21:41Z" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.141745 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.155005 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.165318 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.173883 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.173909 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.173918 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.173933 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.173941 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:28Z","lastTransitionTime":"2026-01-22T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.177732 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.239926 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.240076 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:28 crc kubenswrapper[4829]: E0122 00:07:28.240120 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:07:36.240095167 +0000 UTC m=+34.276337079 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:07:28 crc kubenswrapper[4829]: E0122 00:07:28.240207 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 00:07:28 crc kubenswrapper[4829]: E0122 00:07:28.240227 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.240229 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:07:28 crc kubenswrapper[4829]: E0122 00:07:28.240240 4829 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:28 crc kubenswrapper[4829]: E0122 00:07:28.240297 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:36.240279493 +0000 UTC m=+34.276521465 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:28 crc kubenswrapper[4829]: E0122 00:07:28.240340 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 00:07:28 crc kubenswrapper[4829]: E0122 00:07:28.240353 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 00:07:28 crc kubenswrapper[4829]: E0122 00:07:28.240363 4829 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:28 crc kubenswrapper[4829]: E0122 00:07:28.240392 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:36.240386006 +0000 UTC m=+34.276627918 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.276132 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.276166 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.276177 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.276193 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.276207 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:28Z","lastTransitionTime":"2026-01-22T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.340781 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.340842 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:28 crc kubenswrapper[4829]: E0122 00:07:28.340932 4829 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 00:07:28 crc kubenswrapper[4829]: E0122 00:07:28.340991 4829 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 00:07:28 crc kubenswrapper[4829]: E0122 00:07:28.341008 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:36.340991168 +0000 UTC m=+34.377233080 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 00:07:28 crc kubenswrapper[4829]: E0122 00:07:28.341063 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:36.34104316 +0000 UTC m=+34.377285102 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.379087 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.379118 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.379126 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.379140 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.379148 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:28Z","lastTransitionTime":"2026-01-22T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.481739 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.481796 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.481808 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.481833 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.481846 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:28Z","lastTransitionTime":"2026-01-22T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.497047 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 13:42:30.580828066 +0000 UTC Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.552706 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:28 crc kubenswrapper[4829]: E0122 00:07:28.552922 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.553446 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:07:28 crc kubenswrapper[4829]: E0122 00:07:28.553587 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.553674 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:28 crc kubenswrapper[4829]: E0122 00:07:28.553755 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.584655 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.584709 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.584723 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.584742 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.584754 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:28Z","lastTransitionTime":"2026-01-22T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.701444 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.701517 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.701560 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.701591 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.701604 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:28Z","lastTransitionTime":"2026-01-22T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.734705 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" event={"ID":"257dfafb-8d80-4de2-97e5-96df6b004a43","Type":"ContainerStarted","Data":"87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1"} Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.739276 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" event={"ID":"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340","Type":"ContainerStarted","Data":"417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495"} Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.739381 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" event={"ID":"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340","Type":"ContainerStarted","Data":"03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c"} Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.739405 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" event={"ID":"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340","Type":"ContainerStarted","Data":"89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269"} Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.739425 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" event={"ID":"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340","Type":"ContainerStarted","Data":"f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615"} Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.741365 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-knbr4" event={"ID":"745ab832-48a6-4ce1-988c-30153d4ef1d8","Type":"ContainerStarted","Data":"849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3"} Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.741418 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-knbr4" event={"ID":"745ab832-48a6-4ce1-988c-30153d4ef1d8","Type":"ContainerStarted","Data":"8f3abe923ccc44f2a79d497f1ca1c4bbfb32efba757653552fd3272e2d4ca628"} Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.757482 4829 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.774191 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.787602 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.802490 4829 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.803668 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.803701 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.803713 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.803731 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.803743 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:28Z","lastTransitionTime":"2026-01-22T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.819847 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.835259 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.852374 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.870270 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.885670 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.905601 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.905646 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.905661 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.905682 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.905697 4829 
setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:28Z","lastTransitionTime":"2026-01-22T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.917221 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:28Z 
is after 2025-08-24T17:21:41Z" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.937576 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\
\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.948163 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.971171 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277
f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.984808 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:28 crc kubenswrapper[4829]: I0122 00:07:28.998426 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.008404 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.008454 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.008465 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.008485 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.008497 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:29Z","lastTransitionTime":"2026-01-22T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.011943 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.023127 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.035881 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.048969 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"re
adOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waitin
g\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.057396 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.072783 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37
932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.085841 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.096785 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.104845 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.110557 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.110596 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.110605 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.110621 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.110631 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:29Z","lastTransitionTime":"2026-01-22T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.124148 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z 
is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.135418 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.161575 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.203275 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.213185 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.213221 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.213232 4829 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.213250 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.213260 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:29Z","lastTransitionTime":"2026-01-22T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.240891 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\
\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.284205 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",
\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.316006 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.316041 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.316056 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.316073 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.316085 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:29Z","lastTransitionTime":"2026-01-22T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.418626 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.418650 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.418658 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.418671 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.418679 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:29Z","lastTransitionTime":"2026-01-22T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.497228 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 03:21:59.400663219 +0000 UTC Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.521821 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.521873 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.521884 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.521902 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.521914 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:29Z","lastTransitionTime":"2026-01-22T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.624949 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.625044 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.625063 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.625088 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.625106 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:29Z","lastTransitionTime":"2026-01-22T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.728302 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.728361 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.728379 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.728406 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.728454 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:29Z","lastTransitionTime":"2026-01-22T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.751799 4829 generic.go:334] "Generic (PLEG): container finished" podID="257dfafb-8d80-4de2-97e5-96df6b004a43" containerID="87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1" exitCode=0 Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.751889 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" event={"ID":"257dfafb-8d80-4de2-97e5-96df6b004a43","Type":"ContainerDied","Data":"87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1"} Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.773318 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.797043 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.813214 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.826813 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.830970 4829 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.831012 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.831023 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.831040 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.831052 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:29Z","lastTransitionTime":"2026-01-22T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.841377 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.856094 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.872361 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.886560 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.905179 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"ru
nning\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e
46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.920283 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.930602 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.934414 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.934495 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.934508 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.934590 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.934605 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:29Z","lastTransitionTime":"2026-01-22T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.942653 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.959564 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.978906 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-
22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:29 crc kubenswrapper[4829]: I0122 00:07:29.992376 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.037789 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.037840 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.037857 4829 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.037883 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.037902 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:30Z","lastTransitionTime":"2026-01-22T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.140883 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.140958 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.140988 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.141021 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.141043 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:30Z","lastTransitionTime":"2026-01-22T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.243830 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.243901 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.243924 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.243956 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.243979 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:30Z","lastTransitionTime":"2026-01-22T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.346930 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.346981 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.346999 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.347025 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.347042 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:30Z","lastTransitionTime":"2026-01-22T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.450443 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.450501 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.450520 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.450576 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.450602 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:30Z","lastTransitionTime":"2026-01-22T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.497681 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 06:21:45.304126367 +0000 UTC Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.552587 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.552640 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.552640 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:30 crc kubenswrapper[4829]: E0122 00:07:30.552781 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.552808 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.552845 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.552861 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.552883 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.552901 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:30Z","lastTransitionTime":"2026-01-22T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:30 crc kubenswrapper[4829]: E0122 00:07:30.552911 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:07:30 crc kubenswrapper[4829]: E0122 00:07:30.553020 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.593452 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.593581 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.593609 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.593637 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.593658 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:30Z","lastTransitionTime":"2026-01-22T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:30 crc kubenswrapper[4829]: E0122 00:07:30.613718 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.618441 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.618506 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.618527 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.618624 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.618661 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:30Z","lastTransitionTime":"2026-01-22T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:30 crc kubenswrapper[4829]: E0122 00:07:30.635581 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.638443 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.638471 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.638479 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.638495 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.638504 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:30Z","lastTransitionTime":"2026-01-22T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:30 crc kubenswrapper[4829]: E0122 00:07:30.650717 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.655767 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.655794 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.655802 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.655816 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.655825 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:30Z","lastTransitionTime":"2026-01-22T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:30 crc kubenswrapper[4829]: E0122 00:07:30.669669 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.674359 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.674406 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.674415 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.674433 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.674443 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:30Z","lastTransitionTime":"2026-01-22T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:30 crc kubenswrapper[4829]: E0122 00:07:30.688816 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:30 crc kubenswrapper[4829]: E0122 00:07:30.688933 4829 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.690407 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.690450 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.690464 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.690483 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.690497 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:30Z","lastTransitionTime":"2026-01-22T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.773707 4829 generic.go:334] "Generic (PLEG): container finished" podID="257dfafb-8d80-4de2-97e5-96df6b004a43" containerID="6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb" exitCode=0 Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.773824 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" event={"ID":"257dfafb-8d80-4de2-97e5-96df6b004a43","Type":"ContainerDied","Data":"6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb"} Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.783687 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" event={"ID":"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340","Type":"ContainerStarted","Data":"1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d"} Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.793632 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.793909 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.793958 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.793974 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.794000 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.794017 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:30Z","lastTransitionTime":"2026-01-22T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.809033 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.822327 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.836346 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.851721 4829 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.871455 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"k
ube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.889085 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.896424 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.896453 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.896463 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.896477 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.896485 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:30Z","lastTransitionTime":"2026-01-22T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.903157 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.915187 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.925765 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.936090 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-22T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.995081 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a33
4cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.998648 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.998677 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.998687 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.998703 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:30 crc kubenswrapper[4829]: I0122 00:07:30.998714 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:30Z","lastTransitionTime":"2026-01-22T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.029003 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},
{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.039017 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.056779 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37
932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.101634 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.101877 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.101890 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.101908 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.101920 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:31Z","lastTransitionTime":"2026-01-22T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.204897 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.204939 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.204952 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.204970 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.204982 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:31Z","lastTransitionTime":"2026-01-22T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.308485 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.308602 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.308627 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.308657 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.308675 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:31Z","lastTransitionTime":"2026-01-22T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.412115 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.412194 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.412217 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.412255 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.412274 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:31Z","lastTransitionTime":"2026-01-22T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.498084 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 15:03:08.515465494 +0000 UTC Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.514746 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.514790 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.514800 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.514816 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.514827 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:31Z","lastTransitionTime":"2026-01-22T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.617606 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.617703 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.617721 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.617747 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.617765 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:31Z","lastTransitionTime":"2026-01-22T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.721257 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.721311 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.721327 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.721350 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.721366 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:31Z","lastTransitionTime":"2026-01-22T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.795642 4829 generic.go:334] "Generic (PLEG): container finished" podID="257dfafb-8d80-4de2-97e5-96df6b004a43" containerID="569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920" exitCode=0 Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.795701 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" event={"ID":"257dfafb-8d80-4de2-97e5-96df6b004a43","Type":"ContainerDied","Data":"569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920"} Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.818328 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.823884 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.823940 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.823961 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.823991 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.824013 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:31Z","lastTransitionTime":"2026-01-22T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.848480 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.866899 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.883825 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.900051 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-22T00:07:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.927336 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.927392 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.927409 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.927431 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.927446 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:31Z","lastTransitionTime":"2026-01-22T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.928151 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:31Z 
is after 2025-08-24T17:21:41Z" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.948584 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"k
ube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z
\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.960477 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.982310 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37
932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:31 crc kubenswrapper[4829]: I0122 00:07:31.993121 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.006301 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.018476 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.029996 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.030033 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.030042 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.030059 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.030068 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:32Z","lastTransitionTime":"2026-01-22T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.031701 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.041907 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.052175 4829 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.133126 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.133173 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.133186 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.133203 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.133219 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:32Z","lastTransitionTime":"2026-01-22T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.236365 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.236428 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.236442 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.236462 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.236479 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:32Z","lastTransitionTime":"2026-01-22T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.339317 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.339381 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.339393 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.339417 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.339434 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:32Z","lastTransitionTime":"2026-01-22T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.354430 4829 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.442333 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.442383 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.442400 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.442423 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.442440 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:32Z","lastTransitionTime":"2026-01-22T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.498759 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 14:56:17.454979438 +0000 UTC Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.546793 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.546845 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.546858 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.546876 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.546889 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:32Z","lastTransitionTime":"2026-01-22T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.553385 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.553490 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:32 crc kubenswrapper[4829]: E0122 00:07:32.553581 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.553621 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:32 crc kubenswrapper[4829]: E0122 00:07:32.553757 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:07:32 crc kubenswrapper[4829]: E0122 00:07:32.553859 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.579067 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\
":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.595716 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.613849 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.635206 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"
Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6
c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.649228 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.649276 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.649289 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.649307 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 
00:07:32.649321 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:32Z","lastTransitionTime":"2026-01-22T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.652589 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.675361 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37
932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.692521 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.707285 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.725274 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.752123 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.752414 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.752500 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.752601 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.752690 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:32Z","lastTransitionTime":"2026-01-22T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.758307 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z 
is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.773265 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.786386 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.798405 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.802520 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" event={"ID":"257dfafb-8d80-4de2-97e5-96df6b004a43","Type":"ContainerStarted","Data":"437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed"} Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.813454 4829 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 
00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.825830 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.839011 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.852360 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.855013 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.855239 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.855374 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.855503 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.855685 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:32Z","lastTransitionTime":"2026-01-22T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.869131 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\"
,\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.885351 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.901450 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.916785 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.931628 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"
Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6
c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.944348 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.958256 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.958324 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.958338 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.958355 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.958367 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:32Z","lastTransitionTime":"2026-01-22T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.968729 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.981602 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:32 crc kubenswrapper[4829]: I0122 00:07:32.996648 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.008648 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-22T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.030319 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a33
4cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.044485 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.057342 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.060857 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.060911 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.060923 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.060941 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.060953 4829 
setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:33Z","lastTransitionTime":"2026-01-22T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.164217 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.164269 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.164282 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.164309 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.164322 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:33Z","lastTransitionTime":"2026-01-22T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.266833 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.267368 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.267503 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.267816 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.268066 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:33Z","lastTransitionTime":"2026-01-22T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.370645 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.370685 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.370695 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.370713 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.370726 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:33Z","lastTransitionTime":"2026-01-22T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.498985 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 13:26:38.199005619 +0000 UTC Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.499521 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.499595 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.499623 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.499648 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.499663 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:33Z","lastTransitionTime":"2026-01-22T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.661451 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.661516 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.661525 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.661565 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.661577 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:33Z","lastTransitionTime":"2026-01-22T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.765202 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.765267 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.765291 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.765319 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.765340 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:33Z","lastTransitionTime":"2026-01-22T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.850392 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" event={"ID":"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340","Type":"ContainerStarted","Data":"e6a4cbbb528e84102cd214c04efdce702ad4966e4fd6ac67df83231d549d2ab2"} Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.851873 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.851975 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.867903 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.867940 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.867952 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.867970 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.867982 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:33Z","lastTransitionTime":"2026-01-22T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.876984 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.922452 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.922604 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.930161 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.933431 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.937581 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:33 crc kubenswrapper[4829]: I0122 00:07:33.955106 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:33Z is after 
2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:33.970992 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:33.971366 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:33.971395 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:33.971412 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:33.971437 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:33.971455 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:33Z","lastTransitionTime":"2026-01-22T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:33.985651 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:33.998376 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.009877 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.033746 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\
":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev
/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6a4cbbb528e84102cd214c04efdce702ad4966e4fd6ac67df83231d549d2ab2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"
name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.048411 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\"
:\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.062307 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.073777 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.073816 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.073827 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.073846 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.073857 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:34Z","lastTransitionTime":"2026-01-22T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.091036 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.104587 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.120061 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.133293 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-22T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.147034 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":tr
ue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.161664 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.175390 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.176016 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.176040 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.176048 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.176063 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.176073 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:34Z","lastTransitionTime":"2026-01-22T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.203196 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.214823 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.238885 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.250991 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.270640 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6a4cbbb528e84102cd214c04efdce702ad4966e
4fd6ac67df83231d549d2ab2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.278461 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.278491 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.278502 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.278520 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.278532 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:34Z","lastTransitionTime":"2026-01-22T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.284556 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:
07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.293199 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.303081 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.316272 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.340706 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.353051 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.364330 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/va
r/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.381029 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.381058 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.381067 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.381083 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.381092 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:34Z","lastTransitionTime":"2026-01-22T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.483695 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.483760 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.483797 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.483826 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.483843 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:34Z","lastTransitionTime":"2026-01-22T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.499433 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 07:54:42.086524321 +0000 UTC Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.553110 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.553217 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:34 crc kubenswrapper[4829]: E0122 00:07:34.553276 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.553241 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:34 crc kubenswrapper[4829]: E0122 00:07:34.553398 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:07:34 crc kubenswrapper[4829]: E0122 00:07:34.553570 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.586663 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.586767 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.586793 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.586826 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.586850 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:34Z","lastTransitionTime":"2026-01-22T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.689887 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.689933 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.689951 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.689972 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.689987 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:34Z","lastTransitionTime":"2026-01-22T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.792664 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.792877 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.792940 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.793000 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.793058 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:34Z","lastTransitionTime":"2026-01-22T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.896332 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.896382 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.896393 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.896413 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.896426 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:34Z","lastTransitionTime":"2026-01-22T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.917210 4829 generic.go:334] "Generic (PLEG): container finished" podID="257dfafb-8d80-4de2-97e5-96df6b004a43" containerID="437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed" exitCode=0 Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.928817 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" event={"ID":"257dfafb-8d80-4de2-97e5-96df6b004a43","Type":"ContainerDied","Data":"437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed"} Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.964585 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kuber
netes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered 
and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.990607 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.998748 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.998797 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.998810 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.998830 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:34 crc kubenswrapper[4829]: I0122 00:07:34.998846 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:34Z","lastTransitionTime":"2026-01-22T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.005141 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.024784 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37
932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.038141 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.050018 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.060007 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-22T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.081266 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6a4cbbb528e84102cd214c04efdce702ad4966e4fd6ac67df83231d549d2ab2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ov
nkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.095369 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mo
untPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.104709 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.115129 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.127003 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.144256 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.159648 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.180185 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/va
r/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.182118 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.182146 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.182155 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.182177 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.182188 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:35Z","lastTransitionTime":"2026-01-22T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.329815 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.329902 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.329916 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.329934 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.329946 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:35Z","lastTransitionTime":"2026-01-22T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.432938 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.433075 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.433098 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.433122 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.433139 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:35Z","lastTransitionTime":"2026-01-22T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.530076 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 14:24:55.073800302 +0000 UTC Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.536126 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.536186 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.536199 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.536223 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.536235 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:35Z","lastTransitionTime":"2026-01-22T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.644161 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.644231 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.644250 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.644279 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.644305 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:35Z","lastTransitionTime":"2026-01-22T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.747818 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.748208 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.748276 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.748406 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.748476 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:35Z","lastTransitionTime":"2026-01-22T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.851387 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.851447 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.851461 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.851481 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.851494 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:35Z","lastTransitionTime":"2026-01-22T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.927230 4829 generic.go:334] "Generic (PLEG): container finished" podID="257dfafb-8d80-4de2-97e5-96df6b004a43" containerID="fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28" exitCode=0 Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.927372 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" event={"ID":"257dfafb-8d80-4de2-97e5-96df6b004a43","Type":"ContainerDied","Data":"fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28"} Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.951352 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.955456 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.955497 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.955514 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.955570 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.955589 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:35Z","lastTransitionTime":"2026-01-22T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.966286 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:35 crc kubenswrapper[4829]: I0122 00:07:35.988665 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.006798 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.028783 4829 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.046356 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"k
ube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.060704 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.060743 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.060752 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.060772 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.060783 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:36Z","lastTransitionTime":"2026-01-22T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.064064 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.081533 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.098616 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.111498 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.123686 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-22T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.159876 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6a4cbbb528e84102cd214c04efdce702ad4966e4fd6ac67df83231d549d2ab2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ov
nkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.164027 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.164051 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 
00:07:36.164060 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.164075 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.164088 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:36Z","lastTransitionTime":"2026-01-22T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.197004 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.211129 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.259684 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37
932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.266397 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.266435 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.266445 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.266460 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.266469 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:36Z","lastTransitionTime":"2026-01-22T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.292752 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.292910 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.292954 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:36 crc kubenswrapper[4829]: E0122 00:07:36.292988 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:07:52.292949283 +0000 UTC m=+50.329191195 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:07:36 crc kubenswrapper[4829]: E0122 00:07:36.293089 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 00:07:36 crc kubenswrapper[4829]: E0122 00:07:36.293112 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 00:07:36 crc kubenswrapper[4829]: E0122 00:07:36.293125 4829 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:36 crc kubenswrapper[4829]: E0122 00:07:36.293174 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:52.293155899 +0000 UTC m=+50.329397991 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:36 crc kubenswrapper[4829]: E0122 00:07:36.293252 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 00:07:36 crc kubenswrapper[4829]: E0122 00:07:36.293269 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 00:07:36 crc kubenswrapper[4829]: E0122 00:07:36.293284 4829 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:36 crc kubenswrapper[4829]: E0122 00:07:36.293329 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:52.293321495 +0000 UTC m=+50.329563407 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.369194 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.369238 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.369248 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.369266 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.369275 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:36Z","lastTransitionTime":"2026-01-22T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.394199 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.394263 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:36 crc kubenswrapper[4829]: E0122 00:07:36.394375 4829 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 00:07:36 crc kubenswrapper[4829]: E0122 00:07:36.394446 4829 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 00:07:36 crc kubenswrapper[4829]: E0122 00:07:36.394490 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:52.394470404 +0000 UTC m=+50.430712316 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 00:07:36 crc kubenswrapper[4829]: E0122 00:07:36.394521 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:52.394499905 +0000 UTC m=+50.430741857 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.472301 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.472344 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.472356 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.472376 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.472388 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:36Z","lastTransitionTime":"2026-01-22T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.530614 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 04:56:22.979290277 +0000 UTC Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.553208 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.553254 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.553214 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:36 crc kubenswrapper[4829]: E0122 00:07:36.553369 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:07:36 crc kubenswrapper[4829]: E0122 00:07:36.553440 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:07:36 crc kubenswrapper[4829]: E0122 00:07:36.553516 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.576242 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.576310 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.576332 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.576365 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.576390 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:36Z","lastTransitionTime":"2026-01-22T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.679863 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.679939 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.679962 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.679993 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.680016 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:36Z","lastTransitionTime":"2026-01-22T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.782831 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.782907 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.782926 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.782957 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.782972 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:36Z","lastTransitionTime":"2026-01-22T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.945979 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.946049 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.946061 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.946115 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.946134 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:36Z","lastTransitionTime":"2026-01-22T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.951237 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" event={"ID":"257dfafb-8d80-4de2-97e5-96df6b004a43","Type":"ContainerStarted","Data":"b9d55693ac801f6179327e96684c7afbb592bb48d88d7491f442637557693fc1"} Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.978197 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/o
cp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b
90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:36 crc kubenswrapper[4829]: I0122 00:07:36.990991 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.004283 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.015028 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-22T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.036086 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6a4cbbb528e84102cd214c04efdce702ad4966e4fd6ac67df83231d549d2ab2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ov
nkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.048930 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.048979 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 
00:07:37.048992 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.049013 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.049026 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:37Z","lastTransitionTime":"2026-01-22T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.050126 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9d55693ac801f6179327e96684c7afbb592bb48d88d7491f442637557693fc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\
\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readO
nly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.059821 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.072315 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.084691 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.104164 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":
\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.119313 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}}
,\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.134196 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\
":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.149062 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.151418 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.151482 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.151497 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.151517 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.151530 4829 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:37Z","lastTransitionTime":"2026-01-22T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.163654 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.181764 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.254350 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.254420 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.254430 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.254449 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.254460 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:37Z","lastTransitionTime":"2026-01-22T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.358033 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.358079 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.358090 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.358107 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.358118 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:37Z","lastTransitionTime":"2026-01-22T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.471059 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.471111 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.471121 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.471144 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.471154 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:37Z","lastTransitionTime":"2026-01-22T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.531471 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 06:51:56.822307932 +0000 UTC Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.651941 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:37 crc kubenswrapper[4829]: E0122 00:07:37.652154 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.654354 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.654377 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.654386 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.654403 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.654414 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:37Z","lastTransitionTime":"2026-01-22T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.757124 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.757171 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.757180 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.757199 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.757209 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:37Z","lastTransitionTime":"2026-01-22T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.859895 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.859927 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.859937 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.859953 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.859964 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:37Z","lastTransitionTime":"2026-01-22T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.962702 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.962767 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.962786 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.962811 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:37 crc kubenswrapper[4829]: I0122 00:07:37.962841 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:37Z","lastTransitionTime":"2026-01-22T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.065103 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.065156 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.065169 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.065189 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.065200 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:38Z","lastTransitionTime":"2026-01-22T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.168150 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.168380 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.168444 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.168558 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.168697 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:38Z","lastTransitionTime":"2026-01-22T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.272578 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.272646 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.272661 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.272683 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.272960 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:38Z","lastTransitionTime":"2026-01-22T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.375022 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.375058 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.375067 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.375086 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.375096 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:38Z","lastTransitionTime":"2026-01-22T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.477434 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.477506 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.477520 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.477574 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.477588 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:38Z","lastTransitionTime":"2026-01-22T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.532020 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 20:13:42.71676496 +0000 UTC Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.553580 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.553754 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:38 crc kubenswrapper[4829]: E0122 00:07:38.553913 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:07:38 crc kubenswrapper[4829]: E0122 00:07:38.554060 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.580255 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.580317 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.580332 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.580359 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.580376 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:38Z","lastTransitionTime":"2026-01-22T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.683824 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.683917 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.683930 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.683951 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.683962 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:38Z","lastTransitionTime":"2026-01-22T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.787384 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.787449 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.787468 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.787495 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.787512 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:38Z","lastTransitionTime":"2026-01-22T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.890373 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.890735 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.890882 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.891005 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.891118 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:38Z","lastTransitionTime":"2026-01-22T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.994408 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.994466 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.994485 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.994508 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:38 crc kubenswrapper[4829]: I0122 00:07:38.994525 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:38Z","lastTransitionTime":"2026-01-22T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.114982 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.115038 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.115053 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.115071 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.115083 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:39Z","lastTransitionTime":"2026-01-22T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.165783 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c"] Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.166362 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.166814 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ea70a412-747d-42b1-bcee-db4479d6c229-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-8d59c\" (UID: \"ea70a412-747d-42b1-bcee-db4479d6c229\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.166871 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gh69\" (UniqueName: \"kubernetes.io/projected/ea70a412-747d-42b1-bcee-db4479d6c229-kube-api-access-2gh69\") pod \"ovnkube-control-plane-749d76644c-8d59c\" (UID: \"ea70a412-747d-42b1-bcee-db4479d6c229\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.166932 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ea70a412-747d-42b1-bcee-db4479d6c229-env-overrides\") pod \"ovnkube-control-plane-749d76644c-8d59c\" (UID: \"ea70a412-747d-42b1-bcee-db4479d6c229\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.166965 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ea70a412-747d-42b1-bcee-db4479d6c229-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-8d59c\" (UID: \"ea70a412-747d-42b1-bcee-db4479d6c229\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.169506 4829 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.173633 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.186660 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.205436 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.219034 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.219075 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.219084 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.221744 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.221820 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:39Z","lastTransitionTime":"2026-01-22T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.224817 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.263255 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6a4cbbb528e84102cd214c04efdce702ad4966e4fd6ac67df83231d549d2ab2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.268104 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ea70a412-747d-42b1-bcee-db4479d6c229-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-8d59c\" (UID: \"ea70a412-747d-42b1-bcee-db4479d6c229\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.268253 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gh69\" (UniqueName: 
\"kubernetes.io/projected/ea70a412-747d-42b1-bcee-db4479d6c229-kube-api-access-2gh69\") pod \"ovnkube-control-plane-749d76644c-8d59c\" (UID: \"ea70a412-747d-42b1-bcee-db4479d6c229\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.268384 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ea70a412-747d-42b1-bcee-db4479d6c229-env-overrides\") pod \"ovnkube-control-plane-749d76644c-8d59c\" (UID: \"ea70a412-747d-42b1-bcee-db4479d6c229\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.268470 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ea70a412-747d-42b1-bcee-db4479d6c229-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-8d59c\" (UID: \"ea70a412-747d-42b1-bcee-db4479d6c229\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.270936 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ea70a412-747d-42b1-bcee-db4479d6c229-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-8d59c\" (UID: \"ea70a412-747d-42b1-bcee-db4479d6c229\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.271941 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ea70a412-747d-42b1-bcee-db4479d6c229-env-overrides\") pod \"ovnkube-control-plane-749d76644c-8d59c\" (UID: \"ea70a412-747d-42b1-bcee-db4479d6c229\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.279127 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ea70a412-747d-42b1-bcee-db4479d6c229-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-8d59c\" (UID: \"ea70a412-747d-42b1-bcee-db4479d6c229\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.358430 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.358471 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.358482 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.358499 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.358508 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:39Z","lastTransitionTime":"2026-01-22T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.373851 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gh69\" (UniqueName: \"kubernetes.io/projected/ea70a412-747d-42b1-bcee-db4479d6c229-kube-api-access-2gh69\") pod \"ovnkube-control-plane-749d76644c-8d59c\" (UID: \"ea70a412-747d-42b1-bcee-db4479d6c229\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.379462 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9d55693ac801f6179327e96684c7afbb592bb48d88d7491f442637557693fc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"
},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"
restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.393736 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.419756 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37
932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.435445 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.448416 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.460073 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.461164 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.461203 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.461216 4829 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.461233 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.461245 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:39Z","lastTransitionTime":"2026-01-22T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.474159 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\
\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.491407 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPat
h\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.494185 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.518458 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.532945 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 21:14:23.321698478 +0000 UTC Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.536847 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.550802 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ea70a412-747d-42b1-bcee-db4479d6c229\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8d59c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.553220 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:39 crc kubenswrapper[4829]: E0122 00:07:39.553317 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.564838 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.564924 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.564950 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.564983 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.565008 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:39Z","lastTransitionTime":"2026-01-22T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.571005 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.667889 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.667989 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.668007 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.668033 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.668049 4829 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:39Z","lastTransitionTime":"2026-01-22T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.770830 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.771074 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.771096 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.771124 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.771143 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:39Z","lastTransitionTime":"2026-01-22T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.877968 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.878032 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.878045 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.878063 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.878076 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:39Z","lastTransitionTime":"2026-01-22T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.965993 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fd6j8_7df1ca93-0e8f-4f06-8b8f-2297a8dbb340/ovnkube-controller/0.log" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.969782 4829 generic.go:334] "Generic (PLEG): container finished" podID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerID="e6a4cbbb528e84102cd214c04efdce702ad4966e4fd6ac67df83231d549d2ab2" exitCode=1 Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.969866 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" event={"ID":"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340","Type":"ContainerDied","Data":"e6a4cbbb528e84102cd214c04efdce702ad4966e4fd6ac67df83231d549d2ab2"} Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.970936 4829 scope.go:117] "RemoveContainer" containerID="e6a4cbbb528e84102cd214c04efdce702ad4966e4fd6ac67df83231d549d2ab2" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.975041 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" event={"ID":"ea70a412-747d-42b1-bcee-db4479d6c229","Type":"ContainerStarted","Data":"55764e49ae42fc92a2177278dd2db24901b22d3609b30b90650822c6972df4d1"} Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.975097 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" event={"ID":"ea70a412-747d-42b1-bcee-db4479d6c229","Type":"ContainerStarted","Data":"65417b8588477960ede9d8a15555b74822e2d5e3599f0725f7ef07f42d79215b"} Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.975112 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" event={"ID":"ea70a412-747d-42b1-bcee-db4479d6c229","Type":"ContainerStarted","Data":"20aca56c45f338caef512e39479836830b1be57030a1d6346892cedac7da5e45"} Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.980407 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.980476 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.980499 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.980526 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.980573 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:39Z","lastTransitionTime":"2026-01-22T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:39 crc kubenswrapper[4829]: I0122 00:07:39.994032 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:39Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.010236 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.020671 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.032828 4829 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.046822 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.058798 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ea70a412-747d-42b1-bcee-db4479d6c229\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8d59c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.072913 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},
\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.082934 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.082969 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.082978 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.082992 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.083001 4829 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:40Z","lastTransitionTime":"2026-01-22T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.091988 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.103131 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.113953 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.131089 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6a4cbbb528e84102cd214c04efdce702ad4966e4fd6ac67df83231d549d2ab2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a4cbbb528e84102cd214c04efdce702ad4966e4fd6ac67df83231d549d2ab2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"message\\\":\\\"*v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 00:07:38.525934 6094 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.526590 6094 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.527029 6094 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.527163 6094 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 00:07:38.527397 6094 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.527512 6094 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0122 00:07:38.527587 6094 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0122 00:07:38.528079 6094 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.144441 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9d55693ac801f6179327e96684c7afbb592bb48d88d7491f442637557693fc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerI
D\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMou
nts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,
\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.153601 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11
\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.171589 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee786
6be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-releas
e-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.183286 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.185071 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.185310 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.185322 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.185340 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.185355 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:40Z","lastTransitionTime":"2026-01-22T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.195244 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.207737 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.218841 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.229497 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.239325 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ea70a412-747d-42b1-bcee-db4479d6c229\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65417b8588477960ede9d8a15555b74822e2d5e3599f0725f7ef07f42d79215b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55764e49ae42fc92a2177278dd2db24901b22d3609b30b90650822c6972df4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:39Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8d59c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.253917 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9d55693ac801f6179327e96684c7afbb592bb48d88d7491f442637557693fc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\
\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed 
to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.267010 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.288561 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.288602 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.288613 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.288629 4829 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.288640 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:40Z","lastTransitionTime":"2026-01-22T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.289644 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261
d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\
\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.302944 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.316049 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.325662 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.343836 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6a4cbbb528e84102cd214c04efdce702ad4966e4fd6ac67df83231d549d2ab2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a4cbbb528e84102cd214c04efdce702ad4966e4fd6ac67df83231d549d2ab2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"message\\\":\\\"*v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 00:07:38.525934 6094 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.526590 6094 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.527029 6094 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.527163 6094 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 00:07:38.527397 6094 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.527512 6094 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0122 00:07:38.527587 6094 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0122 00:07:38.528079 6094 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.353867 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.366346 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.379515 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.391208 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.394687 4829 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.394730 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.394742 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.394758 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.394770 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:40Z","lastTransitionTime":"2026-01-22T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.453100 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.497600 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.497631 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.497641 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.497657 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.497667 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:40Z","lastTransitionTime":"2026-01-22T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.599601 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 23:49:14.148345977 +0000 UTC Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.599656 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:07:40 crc kubenswrapper[4829]: E0122 00:07:40.599792 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.599876 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.600081 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.600105 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.600117 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:40 crc kubenswrapper[4829]: E0122 00:07:40.600088 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.600131 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.600169 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:40Z","lastTransitionTime":"2026-01-22T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.646438 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-c82dd"] Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.647280 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:07:40 crc kubenswrapper[4829]: E0122 00:07:40.647347 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.659784 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-re
sources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.674220 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.680319 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpq9v\" (UniqueName: \"kubernetes.io/projected/74beaade-c8f6-4d34-842b-1c03fe72b195-kube-api-access-mpq9v\") pod \"network-metrics-daemon-c82dd\" (UID: \"74beaade-c8f6-4d34-842b-1c03fe72b195\") " pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.680355 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/74beaade-c8f6-4d34-842b-1c03fe72b195-metrics-certs\") pod \"network-metrics-daemon-c82dd\" (UID: \"74beaade-c8f6-4d34-842b-1c03fe72b195\") " pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.686222 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ea70a412-747d-42b1-bcee-db4479d6c229\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65417b8588477960ede9d8a15555b74822e2d5e3599f0725f7ef07f42d79215b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55764e49ae42fc92a2177278dd2db24901b22d3609b30b90650822c6972df4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8d59c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 
00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.699710 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\
":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.711201 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.743200 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.744272 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.744351 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.744435 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.744510 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.744582 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:40Z","lastTransitionTime":"2026-01-22T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.745579 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.745618 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.745630 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.745648 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.745658 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:40Z","lastTransitionTime":"2026-01-22T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.759463 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: E0122 00:07:40.760492 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.773198 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.773354 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.773456 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.773565 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.773669 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:40Z","lastTransitionTime":"2026-01-22T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.781482 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpq9v\" (UniqueName: \"kubernetes.io/projected/74beaade-c8f6-4d34-842b-1c03fe72b195-kube-api-access-mpq9v\") pod \"network-metrics-daemon-c82dd\" (UID: \"74beaade-c8f6-4d34-842b-1c03fe72b195\") " pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.781643 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/74beaade-c8f6-4d34-842b-1c03fe72b195-metrics-certs\") pod \"network-metrics-daemon-c82dd\" (UID: \"74beaade-c8f6-4d34-842b-1c03fe72b195\") " pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:07:40 crc kubenswrapper[4829]: E0122 00:07:40.781893 4829 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 00:07:40 crc kubenswrapper[4829]: E0122 00:07:40.782008 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/74beaade-c8f6-4d34-842b-1c03fe72b195-metrics-certs podName:74beaade-c8f6-4d34-842b-1c03fe72b195 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:41.281992534 +0000 UTC m=+39.318234446 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/74beaade-c8f6-4d34-842b-1c03fe72b195-metrics-certs") pod "network-metrics-daemon-c82dd" (UID: "74beaade-c8f6-4d34-842b-1c03fe72b195") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.783748 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnl
y\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"contai
nerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6a4cbbb528e84102cd214c04efdce702ad4966e4fd6ac67df83231d549d2ab2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a4cbbb528e84102cd214c04efdce702ad4966e4fd6ac67df83231d549d2ab2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"message\\\":\\\"*v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 00:07:38.525934 6094 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.526590 6094 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.527029 6094 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.527163 6094 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 00:07:38.527397 6094 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.527512 6094 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0122 00:07:38.527587 6094 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0122 00:07:38.528079 6094 
factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: E0122 00:07:40.790039 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.795157 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.795328 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.795419 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.795492 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.795567 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:40Z","lastTransitionTime":"2026-01-22T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.804878 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpq9v\" (UniqueName: \"kubernetes.io/projected/74beaade-c8f6-4d34-842b-1c03fe72b195-kube-api-access-mpq9v\") pod \"network-metrics-daemon-c82dd\" (UID: \"74beaade-c8f6-4d34-842b-1c03fe72b195\") " pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:07:40 crc kubenswrapper[4829]: E0122 00:07:40.809903 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.809979 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9d55693ac801f6179327e96684c7afbb592bb48d88d7491f442637557693fc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.813634 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.813668 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:40 crc 
kubenswrapper[4829]: I0122 00:07:40.813676 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.813691 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.813701 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:40Z","lastTransitionTime":"2026-01-22T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.823349 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 
22 00:07:40 crc kubenswrapper[4829]: E0122 00:07:40.825963 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.829024 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.829063 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.829074 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.829091 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.829102 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:40Z","lastTransitionTime":"2026-01-22T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.841553 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resourc
es\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Complet
ed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: E0122 00:07:40.843012 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: E0122 00:07:40.843118 4829 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.846202 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.846248 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.846258 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.846274 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.846283 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:40Z","lastTransitionTime":"2026-01-22T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.855374 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.863131 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c82dd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74beaade-c8f6-4d34-842b-1c03fe72b195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c82dd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.871160 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.886206 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.901069 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.903593 4829 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 00:07:40 crc kubenswrapper[4829]: I0122 00:07:40.913007 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\"
:\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.023814 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.023840 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.023848 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.023862 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.023874 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:41Z","lastTransitionTime":"2026-01-22T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.028249 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fd6j8_7df1ca93-0e8f-4f06-8b8f-2297a8dbb340/ovnkube-controller/0.log" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.031670 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" event={"ID":"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340","Type":"ContainerStarted","Data":"1dabde54982cbc6363cf479705d3627fda28a1ffea67a85805fba20f47ee77a3"} Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.031704 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.032191 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.045394 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.056948 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c82dd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74beaade-c8f6-4d34-842b-1c03fe72b195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c82dd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.069357 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.082642 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.101724 4829 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.125636 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.125674 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.125682 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.125699 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.125710 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:41Z","lastTransitionTime":"2026-01-22T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.125725 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a
6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 
secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.139927 4829 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.158875 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.171189 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ea70a412-747d-42b1-bcee-db4479d6c229\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65417b8588477960ede9d8a15555b74822e2d5e3599f0725f7ef07f42d79215b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55764e49ae42fc92a2177278dd2db24901b22d3609b30b90650822c6972df4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\
\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8d59c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.187754 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.200053 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.210027 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.219027 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.228344 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.228385 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.228396 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.228413 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.228425 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:41Z","lastTransitionTime":"2026-01-22T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.238600 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6a4cbbb528e84102cd214c04efdce702ad4966e
4fd6ac67df83231d549d2ab2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a4cbbb528e84102cd214c04efdce702ad4966e4fd6ac67df83231d549d2ab2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"message\\\":\\\"*v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 00:07:38.525934 6094 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.526590 6094 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.527029 6094 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.527163 6094 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 00:07:38.527397 6094 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.527512 6094 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0122 00:07:38.527587 6094 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0122 00:07:38.528079 6094 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.254049 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9d55693ac801f6179327e96684c7afbb592bb48d88d7491f442637557693fc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerI
D\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMou
nts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,
\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.263859 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11
\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.276260 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.285965 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.298955 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/ser
viceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.313597 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537
f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.324949 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.325783 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/74beaade-c8f6-4d34-842b-1c03fe72b195-metrics-certs\") pod \"network-metrics-daemon-c82dd\" (UID: \"74beaade-c8f6-4d34-842b-1c03fe72b195\") " pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:07:41 crc kubenswrapper[4829]: E0122 00:07:41.325882 4829 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 00:07:41 crc kubenswrapper[4829]: E0122 00:07:41.325950 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/74beaade-c8f6-4d34-842b-1c03fe72b195-metrics-certs podName:74beaade-c8f6-4d34-842b-1c03fe72b195 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:42.325929879 +0000 UTC m=+40.362171791 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/74beaade-c8f6-4d34-842b-1c03fe72b195-metrics-certs") pod "network-metrics-daemon-c82dd" (UID: "74beaade-c8f6-4d34-842b-1c03fe72b195") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.330921 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.330964 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.330974 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.330990 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.330999 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:41Z","lastTransitionTime":"2026-01-22T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.352225 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.365205 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ea70a412-747d-42b1-bcee-db4479d6c229\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65417b8588477960ede9d8a15555b74822e2d5e3599f0725f7ef07f42d79215b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55764e49ae42fc92a2177278dd2db24901b22d3609b30b90650822c6972df4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8d59c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.381270 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9d55693ac801f6179327e96684c7afbb592bb48d88d7491f442637557693fc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\"
:\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"read
y\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.394008 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.430371 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37
932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.432856 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.432895 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.432909 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.432932 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.432945 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:41Z","lastTransitionTime":"2026-01-22T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.442678 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.455399 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.520747 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.535125 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.535163 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.535172 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.535188 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.535197 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:41Z","lastTransitionTime":"2026-01-22T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.552532 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:41 crc kubenswrapper[4829]: E0122 00:07:41.552729 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.558885 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\
":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209948
2919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1dabde54982cbc6363cf479705d3627fda28a1ffea67a85805fba20f47ee77a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a4cbbb528e84102cd214c04efdce702ad4966e4fd6ac67df83231d549d2ab2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"message\\\":\\\"*v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 00:07:38.525934 6094 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.526590 6094 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.527029 6094 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.527163 6094 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 00:07:38.527397 6094 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.527512 6094 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0122 00:07:38.527587 6094 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0122 00:07:38.528079 6094 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.571154 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.585699 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.597979 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c82dd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74beaade-c8f6-4d34-842b-1c03fe72b195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c82dd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.600134 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 18:14:30.910606571 +0000 UTC Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.637259 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.637307 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.637316 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.637329 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.637338 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:41Z","lastTransitionTime":"2026-01-22T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.739852 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.739927 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.739951 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.739973 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.739984 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:41Z","lastTransitionTime":"2026-01-22T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.842688 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.843063 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.843160 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.843259 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.843346 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:41Z","lastTransitionTime":"2026-01-22T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.946516 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.946595 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.946609 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.946648 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:41 crc kubenswrapper[4829]: I0122 00:07:41.946661 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:41Z","lastTransitionTime":"2026-01-22T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.040081 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fd6j8_7df1ca93-0e8f-4f06-8b8f-2297a8dbb340/ovnkube-controller/1.log" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.042093 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fd6j8_7df1ca93-0e8f-4f06-8b8f-2297a8dbb340/ovnkube-controller/0.log" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.047765 4829 generic.go:334] "Generic (PLEG): container finished" podID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerID="1dabde54982cbc6363cf479705d3627fda28a1ffea67a85805fba20f47ee77a3" exitCode=1 Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.047835 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" event={"ID":"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340","Type":"ContainerDied","Data":"1dabde54982cbc6363cf479705d3627fda28a1ffea67a85805fba20f47ee77a3"} Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.047917 4829 scope.go:117] "RemoveContainer" containerID="e6a4cbbb528e84102cd214c04efdce702ad4966e4fd6ac67df83231d549d2ab2" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.049462 4829 scope.go:117] "RemoveContainer" containerID="1dabde54982cbc6363cf479705d3627fda28a1ffea67a85805fba20f47ee77a3" Jan 22 00:07:42 crc kubenswrapper[4829]: E0122 00:07:42.049828 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-fd6j8_openshift-ovn-kubernetes(7df1ca93-0e8f-4f06-8b8f-2297a8dbb340)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.055878 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.055937 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.055958 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.055989 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.056010 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:42Z","lastTransitionTime":"2026-01-22T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.090393 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.108361 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.122279 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.135128 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ea70a412-747d-42b1-bcee-db4479d6c229\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65417b8588477960ede9d8a15555b74822e2d5e3599f0725f7ef07f42d79215b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55764e49ae42fc92a2177278dd2db24901b22d3609b30b90650822c6972df4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:39Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8d59c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.158253 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.158315 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.158328 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.158347 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.158361 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:42Z","lastTransitionTime":"2026-01-22T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.161659 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1dabde54982cbc6363cf479705d3627fda28a1ff
ea67a85805fba20f47ee77a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a4cbbb528e84102cd214c04efdce702ad4966e4fd6ac67df83231d549d2ab2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"message\\\":\\\"*v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 00:07:38.525934 6094 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.526590 6094 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.527029 6094 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.527163 6094 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 00:07:38.527397 6094 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.527512 6094 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0122 00:07:38.527587 6094 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0122 00:07:38.528079 6094 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dabde54982cbc6363cf479705d3627fda28a1ffea67a85805fba20f47ee77a3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:07:41Z\\\",\\\"message\\\":\\\"07449821398607916) with []\\\\nI0122 00:07:41.541915 6334 factory.go:1336] Added *v1.Node event handler 7\\\\nI0122 00:07:41.541953 6334 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0122 00:07:41.541960 6334 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 00:07:41.541980 6334 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 00:07:41.542013 6334 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 00:07:41.542066 6334 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 00:07:41.542220 6334 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0122 00:07:41.542251 6334 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 00:07:41.542302 6334 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 00:07:41.542310 6334 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0122 00:07:41.542324 6334 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 00:07:41.542340 6334 factory.go:656] Stopping watch factory\\\\nI0122 00:07:41.542355 6334 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 00:07:41.542362 6334 ovnkube.go:599] Stopped 
ovnkube\\\\nI0122 00:07:41.542397 6334 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0122 00:07:41.542472 6334 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o:
//b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.175530 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9d55693ac801f6179327e96684c7afbb592bb48d88d7491f442637557693fc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.186110 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.210640 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37
932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.224500 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.239151 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.251579 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.260419 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.260462 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.260472 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.260492 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.260508 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:42Z","lastTransitionTime":"2026-01-22T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.267730 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc 
kubenswrapper[4829]: I0122 00:07:42.285344 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.298655 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c82dd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74beaade-c8f6-4d34-842b-1c03fe72b195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c82dd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.317474 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.334776 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 
2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.335944 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/74beaade-c8f6-4d34-842b-1c03fe72b195-metrics-certs\") pod \"network-metrics-daemon-c82dd\" (UID: \"74beaade-c8f6-4d34-842b-1c03fe72b195\") " pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:07:42 crc kubenswrapper[4829]: E0122 00:07:42.336109 4829 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 00:07:42 crc kubenswrapper[4829]: E0122 00:07:42.336171 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/74beaade-c8f6-4d34-842b-1c03fe72b195-metrics-certs podName:74beaade-c8f6-4d34-842b-1c03fe72b195 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:44.336154903 +0000 UTC m=+42.372396825 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/74beaade-c8f6-4d34-842b-1c03fe72b195-metrics-certs") pod "network-metrics-daemon-c82dd" (UID: "74beaade-c8f6-4d34-842b-1c03fe72b195") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.350161 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\
\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.363277 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.363317 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.363329 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.363346 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.363357 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:42Z","lastTransitionTime":"2026-01-22T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.466031 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.466088 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.466106 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.466132 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.466151 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:42Z","lastTransitionTime":"2026-01-22T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.553022 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.553080 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.553178 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:07:42 crc kubenswrapper[4829]: E0122 00:07:42.553275 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:07:42 crc kubenswrapper[4829]: E0122 00:07:42.553492 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:07:42 crc kubenswrapper[4829]: E0122 00:07:42.553662 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.568925 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.568978 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.568994 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.569019 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.569048 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:42Z","lastTransitionTime":"2026-01-22T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.573136 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.586988 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea
177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.600707 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 14:26:10.761679328 +0000 UTC Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.603782 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":
\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.624722 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.646191 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.666090 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.671152 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.671224 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.671247 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.671280 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.671303 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:42Z","lastTransitionTime":"2026-01-22T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.682943 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ea70a412-747d-42b1-bcee-db4479d6c229\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65417b8588477960ede9d8a15555b74822e2d5e3599f0725f7ef07f42d79215b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55764e49ae42fc92a2177278dd2db24901b22d3609b30b906
50822c6972df4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8d59c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.717315 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]
},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3
605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.740605 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.758941 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.774049 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.774114 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.774132 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.774159 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.774177 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:42Z","lastTransitionTime":"2026-01-22T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.774320 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.797035 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1dabde54982cbc6363cf479705d3627fda28a1ffea67a85805fba20f47ee77a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6a4cbbb528e84102cd214c04efdce702ad4966e4fd6ac67df83231d549d2ab2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"message\\\":\\\"*v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 00:07:38.525934 6094 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.526590 6094 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.527029 6094 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.527163 6094 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 00:07:38.527397 6094 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:38.527512 6094 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0122 00:07:38.527587 6094 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0122 00:07:38.528079 6094 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dabde54982cbc6363cf479705d3627fda28a1ffea67a85805fba20f47ee77a3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:07:41Z\\\",\\\"message\\\":\\\"07449821398607916) with []\\\\nI0122 00:07:41.541915 6334 factory.go:1336] Added *v1.Node event handler 7\\\\nI0122 00:07:41.541953 6334 factory.go:1336] Added *v1.EgressIP event handler 
8\\\\nI0122 00:07:41.541960 6334 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 00:07:41.541980 6334 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 00:07:41.542013 6334 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 00:07:41.542066 6334 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 00:07:41.542220 6334 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0122 00:07:41.542251 6334 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 00:07:41.542302 6334 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 00:07:41.542310 6334 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0122 00:07:41.542324 6334 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 00:07:41.542340 6334 factory.go:656] Stopping watch factory\\\\nI0122 00:07:41.542355 6334 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 00:07:41.542362 6334 ovnkube.go:599] Stopped ovnkube\\\\nI0122 00:07:41.542397 6334 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0122 00:07:41.542472 6334 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\
\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.820358 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9d55693ac801f6179327e96684c7afbb592bb48d88d7491f442637557693fc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.833405 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.846813 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.865437 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.876840 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c82dd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74beaade-c8f6-4d34-842b-1c03fe72b195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c82dd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.877399 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.877430 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.877442 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.877459 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.877470 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:42Z","lastTransitionTime":"2026-01-22T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.979745 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.979803 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.979823 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.979848 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:42 crc kubenswrapper[4829]: I0122 00:07:42.979867 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:42Z","lastTransitionTime":"2026-01-22T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.053319 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fd6j8_7df1ca93-0e8f-4f06-8b8f-2297a8dbb340/ovnkube-controller/1.log" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.058129 4829 scope.go:117] "RemoveContainer" containerID="1dabde54982cbc6363cf479705d3627fda28a1ffea67a85805fba20f47ee77a3" Jan 22 00:07:43 crc kubenswrapper[4829]: E0122 00:07:43.058470 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-fd6j8_openshift-ovn-kubernetes(7df1ca93-0e8f-4f06-8b8f-2297a8dbb340)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.076399 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.081891 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.081943 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.081960 4829 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.081983 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.082000 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:43Z","lastTransitionTime":"2026-01-22T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.091476 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\
\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.104195 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",
\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.124909 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-ap
iserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.143941 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.166474 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.177155 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ea70a412-747d-42b1-bcee-db4479d6c229\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65417b8588477960ede9d8a15555b74822e2d5e3599f0725f7ef07f42d79215b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55764e49ae42fc92a2177278dd2db24901b22d3609b30b90650822c6972df4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:39Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8d59c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.191846 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.191897 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.191910 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.191927 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.191938 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:43Z","lastTransitionTime":"2026-01-22T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.211324 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/et
cd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\
\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.233090 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.256793 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.269421 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.293962 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.294002 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.294012 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.294029 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.294040 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:43Z","lastTransitionTime":"2026-01-22T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.300470 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1dabde54982cbc6363cf479705d3627fda28a1ff
ea67a85805fba20f47ee77a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dabde54982cbc6363cf479705d3627fda28a1ffea67a85805fba20f47ee77a3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:07:41Z\\\",\\\"message\\\":\\\"07449821398607916) with []\\\\nI0122 00:07:41.541915 6334 factory.go:1336] Added *v1.Node event handler 7\\\\nI0122 00:07:41.541953 6334 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0122 00:07:41.541960 6334 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 00:07:41.541980 6334 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 00:07:41.542013 6334 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 00:07:41.542066 6334 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 00:07:41.542220 6334 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0122 00:07:41.542251 6334 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 00:07:41.542302 6334 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 00:07:41.542310 6334 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0122 00:07:41.542324 6334 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 00:07:41.542340 6334 factory.go:656] Stopping watch factory\\\\nI0122 00:07:41.542355 6334 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 00:07:41.542362 6334 ovnkube.go:599] Stopped ovnkube\\\\nI0122 00:07:41.542397 6334 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0122 00:07:41.542472 6334 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fd6j8_openshift-ovn-kubernetes(7df1ca93-0e8f-4f06-8b8f-2297a8dbb340)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.318485 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9d55693ac801f6179327e96684c7afbb592bb48d88d7491f442637557693fc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.329365 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\
\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.395784 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.395844 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.395863 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.395890 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.395909 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:43Z","lastTransitionTime":"2026-01-22T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.399602 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.415852 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.425914 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c82dd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74beaade-c8f6-4d34-842b-1c03fe72b195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c82dd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.498714 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.498745 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.498754 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.498768 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.498777 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:43Z","lastTransitionTime":"2026-01-22T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.553134 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:43 crc kubenswrapper[4829]: E0122 00:07:43.553375 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.601513 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 09:39:39.843482836 +0000 UTC Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.602003 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.602052 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.602069 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.602095 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.602115 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:43Z","lastTransitionTime":"2026-01-22T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.705630 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.705733 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.705762 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.705794 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.705816 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:43Z","lastTransitionTime":"2026-01-22T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.808893 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.808967 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.808991 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.809024 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.809047 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:43Z","lastTransitionTime":"2026-01-22T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.912305 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.912365 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.912387 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.912416 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:43 crc kubenswrapper[4829]: I0122 00:07:43.912439 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:43Z","lastTransitionTime":"2026-01-22T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.015898 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.015978 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.016005 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.016036 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.016058 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:44Z","lastTransitionTime":"2026-01-22T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.118884 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.118975 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.118994 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.119019 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.119038 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:44Z","lastTransitionTime":"2026-01-22T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.222649 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.222757 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.222777 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.222803 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.222823 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:44Z","lastTransitionTime":"2026-01-22T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.325991 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.326073 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.326097 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.326125 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.326146 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:44Z","lastTransitionTime":"2026-01-22T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.400683 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/74beaade-c8f6-4d34-842b-1c03fe72b195-metrics-certs\") pod \"network-metrics-daemon-c82dd\" (UID: \"74beaade-c8f6-4d34-842b-1c03fe72b195\") " pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:07:44 crc kubenswrapper[4829]: E0122 00:07:44.400819 4829 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 00:07:44 crc kubenswrapper[4829]: E0122 00:07:44.400914 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/74beaade-c8f6-4d34-842b-1c03fe72b195-metrics-certs podName:74beaade-c8f6-4d34-842b-1c03fe72b195 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:48.400889912 +0000 UTC m=+46.437131924 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/74beaade-c8f6-4d34-842b-1c03fe72b195-metrics-certs") pod "network-metrics-daemon-c82dd" (UID: "74beaade-c8f6-4d34-842b-1c03fe72b195") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.429591 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.429644 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.429658 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.429687 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.429700 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:44Z","lastTransitionTime":"2026-01-22T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.533588 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.533652 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.533670 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.533697 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.533717 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:44Z","lastTransitionTime":"2026-01-22T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.553114 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.553144 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.553200 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:07:44 crc kubenswrapper[4829]: E0122 00:07:44.553332 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:07:44 crc kubenswrapper[4829]: E0122 00:07:44.553441 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:07:44 crc kubenswrapper[4829]: E0122 00:07:44.553769 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.601997 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 10:47:56.834838918 +0000 UTC Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.636908 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.636947 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.636964 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.636989 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.637008 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:44Z","lastTransitionTime":"2026-01-22T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.739534 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.739952 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.740198 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.740432 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.740746 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:44Z","lastTransitionTime":"2026-01-22T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.843997 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.844072 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.844094 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.844120 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.844137 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:44Z","lastTransitionTime":"2026-01-22T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.947629 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.947694 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.947711 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.947740 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:44 crc kubenswrapper[4829]: I0122 00:07:44.947758 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:44Z","lastTransitionTime":"2026-01-22T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.050632 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.050691 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.050708 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.050733 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.050754 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:45Z","lastTransitionTime":"2026-01-22T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.154350 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.154411 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.154425 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.154453 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.154467 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:45Z","lastTransitionTime":"2026-01-22T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.257605 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.257667 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.257685 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.257709 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.257727 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:45Z","lastTransitionTime":"2026-01-22T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.361063 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.361121 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.361142 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.361169 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.361188 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:45Z","lastTransitionTime":"2026-01-22T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.463801 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.463931 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.463950 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.463976 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.463994 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:45Z","lastTransitionTime":"2026-01-22T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.552679 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:45 crc kubenswrapper[4829]: E0122 00:07:45.552856 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.568364 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.568438 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.568459 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.568487 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.568509 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:45Z","lastTransitionTime":"2026-01-22T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.602892 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 13:15:14.233132793 +0000 UTC Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.671803 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.671866 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.671883 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.671909 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.671926 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:45Z","lastTransitionTime":"2026-01-22T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.774795 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.774867 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.774886 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.774913 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.774933 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:45Z","lastTransitionTime":"2026-01-22T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.878233 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.878309 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.878332 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.878362 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.878382 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:45Z","lastTransitionTime":"2026-01-22T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.981060 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.981130 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.981149 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.981174 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:45 crc kubenswrapper[4829]: I0122 00:07:45.981193 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:45Z","lastTransitionTime":"2026-01-22T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.083630 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.083711 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.083727 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.083781 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.083798 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:46Z","lastTransitionTime":"2026-01-22T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.186183 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.186232 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.186242 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.186256 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.186267 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:46Z","lastTransitionTime":"2026-01-22T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.289604 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.289659 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.289673 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.289694 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.289706 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:46Z","lastTransitionTime":"2026-01-22T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.392926 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.392992 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.393006 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.393027 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.393042 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:46Z","lastTransitionTime":"2026-01-22T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.496237 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.496507 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.496524 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.496572 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.496589 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:46Z","lastTransitionTime":"2026-01-22T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.552835 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.552914 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.552860 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:46 crc kubenswrapper[4829]: E0122 00:07:46.553047 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:07:46 crc kubenswrapper[4829]: E0122 00:07:46.553268 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:07:46 crc kubenswrapper[4829]: E0122 00:07:46.553392 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.598960 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.598994 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.599005 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.599022 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.599033 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:46Z","lastTransitionTime":"2026-01-22T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.603703 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 01:25:06.027826365 +0000 UTC Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.701885 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.701968 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.701997 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.702021 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.702039 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:46Z","lastTransitionTime":"2026-01-22T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.804345 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.804390 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.804405 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.804422 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.804435 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:46Z","lastTransitionTime":"2026-01-22T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.907185 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.907249 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.907267 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.907292 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:46 crc kubenswrapper[4829]: I0122 00:07:46.907311 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:46Z","lastTransitionTime":"2026-01-22T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.010157 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.010234 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.010245 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.010267 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.010282 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:47Z","lastTransitionTime":"2026-01-22T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.113292 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.113340 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.113349 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.113373 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.113391 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:47Z","lastTransitionTime":"2026-01-22T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.215634 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.215697 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.215714 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.215740 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.215757 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:47Z","lastTransitionTime":"2026-01-22T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.318146 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.318186 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.318195 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.318210 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.318219 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:47Z","lastTransitionTime":"2026-01-22T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.420852 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.420910 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.420929 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.420954 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.420971 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:47Z","lastTransitionTime":"2026-01-22T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.524227 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.524274 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.524286 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.524336 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.524347 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:47Z","lastTransitionTime":"2026-01-22T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.553321 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:47 crc kubenswrapper[4829]: E0122 00:07:47.553495 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.603833 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 07:12:45.204151054 +0000 UTC Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.627805 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.627902 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.627926 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.627958 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.627978 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:47Z","lastTransitionTime":"2026-01-22T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.731360 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.731422 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.731442 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.731482 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.731501 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:47Z","lastTransitionTime":"2026-01-22T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.836495 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.836588 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.836629 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.836660 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.836682 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:47Z","lastTransitionTime":"2026-01-22T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.939378 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.939423 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.939433 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.939448 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:47 crc kubenswrapper[4829]: I0122 00:07:47.939456 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:47Z","lastTransitionTime":"2026-01-22T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.042070 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.042123 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.042135 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.042152 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.042163 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:48Z","lastTransitionTime":"2026-01-22T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.144560 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.144604 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.144613 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.144633 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.144644 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:48Z","lastTransitionTime":"2026-01-22T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.247504 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.247594 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.247609 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.247628 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.247641 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:48Z","lastTransitionTime":"2026-01-22T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.350506 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.350580 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.350600 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.350633 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.350650 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:48Z","lastTransitionTime":"2026-01-22T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.447464 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/74beaade-c8f6-4d34-842b-1c03fe72b195-metrics-certs\") pod \"network-metrics-daemon-c82dd\" (UID: \"74beaade-c8f6-4d34-842b-1c03fe72b195\") " pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:07:48 crc kubenswrapper[4829]: E0122 00:07:48.447725 4829 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 00:07:48 crc kubenswrapper[4829]: E0122 00:07:48.447847 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/74beaade-c8f6-4d34-842b-1c03fe72b195-metrics-certs podName:74beaade-c8f6-4d34-842b-1c03fe72b195 nodeName:}" failed. No retries permitted until 2026-01-22 00:07:56.447818628 +0000 UTC m=+54.484060580 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/74beaade-c8f6-4d34-842b-1c03fe72b195-metrics-certs") pod "network-metrics-daemon-c82dd" (UID: "74beaade-c8f6-4d34-842b-1c03fe72b195") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.454603 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.454659 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.454678 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.454704 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.454725 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:48Z","lastTransitionTime":"2026-01-22T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.553233 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.553295 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.553239 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:07:48 crc kubenswrapper[4829]: E0122 00:07:48.553581 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:07:48 crc kubenswrapper[4829]: E0122 00:07:48.553747 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:07:48 crc kubenswrapper[4829]: E0122 00:07:48.553945 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.558383 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.558423 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.558433 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.558454 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.558466 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:48Z","lastTransitionTime":"2026-01-22T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.604840 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 18:32:42.346768477 +0000 UTC Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.661906 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.661973 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.661991 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.662016 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.662033 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:48Z","lastTransitionTime":"2026-01-22T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.765500 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.765745 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.765769 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.765793 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.765810 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:48Z","lastTransitionTime":"2026-01-22T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.869170 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.869211 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.869226 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.869246 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.869261 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:48Z","lastTransitionTime":"2026-01-22T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.971862 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.971917 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.971936 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.971962 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:48 crc kubenswrapper[4829]: I0122 00:07:48.971981 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:48Z","lastTransitionTime":"2026-01-22T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.075531 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.075640 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.075670 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.075721 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.075747 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:49Z","lastTransitionTime":"2026-01-22T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.179044 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.179103 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.179120 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.179143 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.179158 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:49Z","lastTransitionTime":"2026-01-22T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.282124 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.282197 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.282222 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.282273 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.282298 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:49Z","lastTransitionTime":"2026-01-22T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.342181 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.355674 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.357997 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:49Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.371766 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:49Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.382989 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:49Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.384726 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.384750 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.384759 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.384772 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.384781 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:49Z","lastTransitionTime":"2026-01-22T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.396723 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:49Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.413434 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:49Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.427473 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:49Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.437227 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ea70a412-747d-42b1-bcee-db4479d6c229\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65417b8588477960ede9d8a15555b74822e2d5e3599f0725f7ef07f42d79215b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55764e49ae42fc92a2177278dd2db24901b22d3609b30b90650822c6972df4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:39Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8d59c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:49Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.450314 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9d55693ac801f6179327e96684c7afbb592bb48d88d7491f442637557693fc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\
\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed 
to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:49Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.458282 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:49Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.478782 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37
932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:49Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.486352 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.486388 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.486397 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.486411 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.486420 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:49Z","lastTransitionTime":"2026-01-22T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.491796 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:49Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.502611 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:49Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.518615 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:49Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.539439 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1dabde54982cbc6363cf479705d3627fda28a1ff
ea67a85805fba20f47ee77a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dabde54982cbc6363cf479705d3627fda28a1ffea67a85805fba20f47ee77a3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:07:41Z\\\",\\\"message\\\":\\\"07449821398607916) with []\\\\nI0122 00:07:41.541915 6334 factory.go:1336] Added *v1.Node event handler 7\\\\nI0122 00:07:41.541953 6334 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0122 00:07:41.541960 6334 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 00:07:41.541980 6334 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 00:07:41.542013 6334 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 00:07:41.542066 6334 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 00:07:41.542220 6334 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0122 00:07:41.542251 6334 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 00:07:41.542302 6334 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 00:07:41.542310 6334 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0122 00:07:41.542324 6334 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 00:07:41.542340 6334 factory.go:656] Stopping watch factory\\\\nI0122 00:07:41.542355 6334 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 00:07:41.542362 6334 ovnkube.go:599] Stopped ovnkube\\\\nI0122 00:07:41.542397 6334 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0122 00:07:41.542472 6334 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fd6j8_openshift-ovn-kubernetes(7df1ca93-0e8f-4f06-8b8f-2297a8dbb340)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:49Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.552750 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:49 crc kubenswrapper[4829]: E0122 00:07:49.552906 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.553883 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:49Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.570937 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:49Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.582356 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c82dd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74beaade-c8f6-4d34-842b-1c03fe72b195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c82dd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:49Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.589452 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.589509 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.589530 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.589584 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.589605 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:49Z","lastTransitionTime":"2026-01-22T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.605735 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 00:03:09.748415068 +0000 UTC Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.692883 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.692938 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.692956 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.692981 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.693001 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:49Z","lastTransitionTime":"2026-01-22T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.798023 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.798088 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.798106 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.798131 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.798147 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:49Z","lastTransitionTime":"2026-01-22T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.900727 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.900796 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.900813 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.900834 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:49 crc kubenswrapper[4829]: I0122 00:07:49.900852 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:49Z","lastTransitionTime":"2026-01-22T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.004967 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.005100 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.005180 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.005217 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.005305 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:50Z","lastTransitionTime":"2026-01-22T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.108384 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.108500 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.108522 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.108585 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.108608 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:50Z","lastTransitionTime":"2026-01-22T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.211767 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.211855 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.211880 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.211918 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.211941 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:50Z","lastTransitionTime":"2026-01-22T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.314992 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.315074 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.315097 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.315129 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.315153 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:50Z","lastTransitionTime":"2026-01-22T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.418476 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.418581 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.418596 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.418622 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.418635 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:50Z","lastTransitionTime":"2026-01-22T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.521273 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.521356 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.521380 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.521410 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.521431 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:50Z","lastTransitionTime":"2026-01-22T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.552886 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.552970 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:07:50 crc kubenswrapper[4829]: E0122 00:07:50.553059 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.553083 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:50 crc kubenswrapper[4829]: E0122 00:07:50.553290 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:07:50 crc kubenswrapper[4829]: E0122 00:07:50.553428 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.606960 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 08:29:15.92548081 +0000 UTC Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.624357 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.624406 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.624422 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.624493 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.624513 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:50Z","lastTransitionTime":"2026-01-22T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.728054 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.728128 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.728152 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.728187 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.728209 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:50Z","lastTransitionTime":"2026-01-22T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.831574 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.831624 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.831641 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.831662 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.831677 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:50Z","lastTransitionTime":"2026-01-22T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.934171 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.934244 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.934268 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.934298 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.934318 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:50Z","lastTransitionTime":"2026-01-22T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.960776 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.960856 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.960875 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.960900 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.960920 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:50Z","lastTransitionTime":"2026-01-22T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:50 crc kubenswrapper[4829]: E0122 00:07:50.982858 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:50Z is after 
2025-08-24T17:21:41Z" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.987116 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.987170 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.987192 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.987222 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:50 crc kubenswrapper[4829]: I0122 00:07:50.987247 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:50Z","lastTransitionTime":"2026-01-22T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:51 crc kubenswrapper[4829]: E0122 00:07:51.003067 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:51Z is after 
2025-08-24T17:21:41Z" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.007495 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.007609 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.007628 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.007657 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.007700 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:51Z","lastTransitionTime":"2026-01-22T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:51 crc kubenswrapper[4829]: E0122 00:07:51.023416 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:51Z is after 
2025-08-24T17:21:41Z" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.031641 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.031732 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.031754 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.031779 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.031795 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:51Z","lastTransitionTime":"2026-01-22T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:51 crc kubenswrapper[4829]: E0122 00:07:51.044490 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:51Z is after 
2025-08-24T17:21:41Z" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.047855 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.047894 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.047904 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.047920 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.047930 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:51Z","lastTransitionTime":"2026-01-22T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:51 crc kubenswrapper[4829]: E0122 00:07:51.059181 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:07:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:51Z is after 
2025-08-24T17:21:41Z" Jan 22 00:07:51 crc kubenswrapper[4829]: E0122 00:07:51.059287 4829 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.061170 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.061225 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.061241 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.061266 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.061282 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:51Z","lastTransitionTime":"2026-01-22T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.164955 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.165033 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.165056 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.165088 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.165110 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:51Z","lastTransitionTime":"2026-01-22T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.267953 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.268009 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.268026 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.268051 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.268068 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:51Z","lastTransitionTime":"2026-01-22T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.371106 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.371224 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.371247 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.371277 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.371298 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:51Z","lastTransitionTime":"2026-01-22T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.474281 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.474341 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.474353 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.474370 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.474382 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:51Z","lastTransitionTime":"2026-01-22T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.552934 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:51 crc kubenswrapper[4829]: E0122 00:07:51.553061 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.577019 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.577105 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.577133 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.577168 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.577186 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:51Z","lastTransitionTime":"2026-01-22T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.607379 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 18:57:21.9682219 +0000 UTC Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.679447 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.679529 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.679599 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.679631 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.679659 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:51Z","lastTransitionTime":"2026-01-22T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.782498 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.782585 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.782599 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.782617 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.782629 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:51Z","lastTransitionTime":"2026-01-22T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.885300 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.885351 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.885367 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.885387 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.885400 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:51Z","lastTransitionTime":"2026-01-22T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.987637 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.987686 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.987701 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.987727 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:51 crc kubenswrapper[4829]: I0122 00:07:51.987744 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:51Z","lastTransitionTime":"2026-01-22T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.091018 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.091075 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.091089 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.091107 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.091117 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:52Z","lastTransitionTime":"2026-01-22T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.194246 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.194349 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.194367 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.194394 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.194412 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:52Z","lastTransitionTime":"2026-01-22T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.297986 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.298044 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.298060 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.298089 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.298111 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:52Z","lastTransitionTime":"2026-01-22T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.389181 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.389326 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:07:52 crc kubenswrapper[4829]: E0122 00:07:52.389407 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:08:24.389378588 +0000 UTC m=+82.425620510 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:07:52 crc kubenswrapper[4829]: E0122 00:07:52.389467 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 00:07:52 crc kubenswrapper[4829]: E0122 00:07:52.389491 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 00:07:52 crc kubenswrapper[4829]: E0122 00:07:52.389511 4829 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.389535 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:52 crc kubenswrapper[4829]: E0122 00:07:52.389607 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 00:08:24.389584544 +0000 UTC m=+82.425826496 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:52 crc kubenswrapper[4829]: E0122 00:07:52.389728 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 00:07:52 crc kubenswrapper[4829]: E0122 00:07:52.389755 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 00:07:52 crc kubenswrapper[4829]: E0122 00:07:52.389769 4829 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:52 crc kubenswrapper[4829]: E0122 00:07:52.389821 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 00:08:24.389806511 +0000 UTC m=+82.426048453 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.401486 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.401526 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.401557 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.401577 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.401591 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:52Z","lastTransitionTime":"2026-01-22T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.490800 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.490858 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:52 crc kubenswrapper[4829]: E0122 00:07:52.490940 4829 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 00:07:52 crc kubenswrapper[4829]: E0122 00:07:52.491010 4829 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 00:07:52 crc kubenswrapper[4829]: E0122 00:07:52.491037 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 00:08:24.491015001 +0000 UTC m=+82.527257003 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 00:07:52 crc kubenswrapper[4829]: E0122 00:07:52.491085 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 00:08:24.491064512 +0000 UTC m=+82.527306504 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.504919 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.504966 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.504983 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.505006 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.505086 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:52Z","lastTransitionTime":"2026-01-22T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.552815 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:52 crc kubenswrapper[4829]: E0122 00:07:52.552961 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.553106 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.553154 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:07:52 crc kubenswrapper[4829]: E0122 00:07:52.553215 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:07:52 crc kubenswrapper[4829]: E0122 00:07:52.553357 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.577853 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-
pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:52Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.599353 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:52Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.607290 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.607326 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.607338 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.607354 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.607365 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:52Z","lastTransitionTime":"2026-01-22T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.607507 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 15:13:48.696782337 +0000 UTC Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.616961 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:52Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.630760 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ea70a412-747d-42b1-bcee-db4479d6c229\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65417b8588477960ede9d8a15555b74822e2d5e3599f0725f7ef07f42d79215b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55764e49ae42fc92a2177278dd2db24901b22d3609b30b90650822c6972df4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8d59c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:52Z is after 2025-08-24T17:21:41Z" Jan 22 
00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.649537 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:52Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.682657 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37
932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:52Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.700822 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:52Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.709764 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.709807 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.709817 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.709834 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.709845 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:52Z","lastTransitionTime":"2026-01-22T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.718419 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:52Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.734534 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:52Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.763719 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1dabde54982cbc6363cf479705d3627fda28a1ffea67a85805fba20f47ee77a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dabde54982cbc6363cf479705d3627fda28a1ffea67a85805fba20f47ee77a3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:07:41Z\\\",\\\"message\\\":\\\"07449821398607916) with []\\\\nI0122 00:07:41.541915 6334 factory.go:1336] Added *v1.Node event handler 7\\\\nI0122 00:07:41.541953 6334 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0122 00:07:41.541960 6334 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 00:07:41.541980 6334 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 00:07:41.542013 6334 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 00:07:41.542066 6334 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 00:07:41.542220 6334 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0122 00:07:41.542251 6334 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 00:07:41.542302 6334 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 00:07:41.542310 6334 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0122 00:07:41.542324 6334 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 00:07:41.542340 6334 factory.go:656] Stopping watch factory\\\\nI0122 00:07:41.542355 6334 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 00:07:41.542362 6334 ovnkube.go:599] Stopped ovnkube\\\\nI0122 00:07:41.542397 6334 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0122 00:07:41.542472 6334 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fd6j8_openshift-ovn-kubernetes(7df1ca93-0e8f-4f06-8b8f-2297a8dbb340)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:52Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.786410 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9d55693ac801f6179327e96684c7afbb592bb48d88d7491f442637557693fc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:52Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.801668 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-22T00:07:52Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.813773 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.813979 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.814090 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.814163 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.814233 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:52Z","lastTransitionTime":"2026-01-22T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.820812 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:52Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.836392 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c82dd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74beaade-c8f6-4d34-842b-1c03fe72b195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c82dd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:52Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.854591 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"51b88d2f-3b05-42dd-85b5-eda696a7940f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42fb8905842fc9628adedb2e4d8f72324c1fa83cd086ff87ac599053d271742e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ebd2b2db90bf8261200aba8c0df5d07b321e2f4d6f428707c3d0716c0d684e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a949d7b4e46134e62601d299ac318fd21e3d1a46e9d66651e000cddbaf9732f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:52Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.871373 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:52Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.885627 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\
"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:52Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.904948 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var
/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:52Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.917025 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.917120 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.917189 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.917251 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:52 crc kubenswrapper[4829]: I0122 00:07:52.917318 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:52Z","lastTransitionTime":"2026-01-22T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.019386 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.019430 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.019443 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.019459 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.019470 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:53Z","lastTransitionTime":"2026-01-22T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.121992 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.122041 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.122055 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.122072 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.122084 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:53Z","lastTransitionTime":"2026-01-22T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.224099 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.224158 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.224170 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.224185 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.224195 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:53Z","lastTransitionTime":"2026-01-22T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.327630 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.327677 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.327692 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.327711 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.327724 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:53Z","lastTransitionTime":"2026-01-22T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.430285 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.430392 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.430418 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.430452 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.430477 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:53Z","lastTransitionTime":"2026-01-22T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.533914 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.533989 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.534013 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.534045 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.534069 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:53Z","lastTransitionTime":"2026-01-22T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.553241 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:53 crc kubenswrapper[4829]: E0122 00:07:53.553471 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.607644 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 05:22:38.189643066 +0000 UTC Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.637141 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.637219 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.637271 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.637294 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.637310 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:53Z","lastTransitionTime":"2026-01-22T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.740393 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.740441 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.740453 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.740470 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.740481 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:53Z","lastTransitionTime":"2026-01-22T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.843587 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.843621 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.843631 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.843646 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.843659 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:53Z","lastTransitionTime":"2026-01-22T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.950347 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.950399 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.950426 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.950473 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:53 crc kubenswrapper[4829]: I0122 00:07:53.950498 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:53Z","lastTransitionTime":"2026-01-22T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.054330 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.054381 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.054393 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.054412 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.054423 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:54Z","lastTransitionTime":"2026-01-22T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.158290 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.158352 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.158364 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.158383 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.158395 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:54Z","lastTransitionTime":"2026-01-22T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.261395 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.261462 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.261487 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.261515 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.261580 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:54Z","lastTransitionTime":"2026-01-22T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.364856 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.364912 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.364929 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.364953 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.364970 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:54Z","lastTransitionTime":"2026-01-22T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.468398 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.468441 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.468453 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.468472 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.468483 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:54Z","lastTransitionTime":"2026-01-22T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.553058 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.553324 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:54 crc kubenswrapper[4829]: E0122 00:07:54.553457 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:07:54 crc kubenswrapper[4829]: E0122 00:07:54.553688 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.553795 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:07:54 crc kubenswrapper[4829]: E0122 00:07:54.553954 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
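Every failed pod sync above carries the same root cause: the container runtime reports NetworkReady=false because it finds no CNI configuration file under /etc/kubernetes/cni/net.d/. The following Go sketch shows the general shape of such a directory probe; it is an illustration only, not the kubelet's or CRI-O's actual implementation, and the accepted extensions (.conf, .conflist, .json) follow common CNI conventions rather than anything stated in this log.

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

// hasCNIConfig reports whether dir contains at least one file that looks like
// a CNI network configuration. The extension list is a convention-based guess.
func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch strings.ToLower(filepath.Ext(e.Name())) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	fmt.Printf("CNI config present: %v (err: %v)\n", ok, err)
}

Once the cluster's network plugin drops a configuration file (typically a *.conflist in OVN/Multus setups) into that directory, the runtime flips NetworkReady to true and the repeated NodeNotReady recordings above stop.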
pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.570574 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.570617 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.570635 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.570657 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.570676 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:54Z","lastTransitionTime":"2026-01-22T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.608609 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 23:25:08.394875568 +0000 UTC Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.673830 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.673883 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.673893 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.673912 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.673923 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:54Z","lastTransitionTime":"2026-01-22T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.776448 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.776512 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.776531 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.776595 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.776614 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:54Z","lastTransitionTime":"2026-01-22T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.879590 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.879628 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.879640 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.879659 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.879672 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:54Z","lastTransitionTime":"2026-01-22T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.982635 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.982698 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.982718 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.982742 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:54 crc kubenswrapper[4829]: I0122 00:07:54.982759 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:54Z","lastTransitionTime":"2026-01-22T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.086038 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.086100 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.086119 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.086147 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.086165 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:55Z","lastTransitionTime":"2026-01-22T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.189418 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.189481 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.189522 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.189601 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.189628 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:55Z","lastTransitionTime":"2026-01-22T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.292738 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.292798 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.292852 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.292872 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.292885 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:55Z","lastTransitionTime":"2026-01-22T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.395767 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.395818 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.395833 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.395856 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.395873 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:55Z","lastTransitionTime":"2026-01-22T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.498267 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.498320 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.498336 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.498358 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.498375 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:55Z","lastTransitionTime":"2026-01-22T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.553370 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:55 crc kubenswrapper[4829]: E0122 00:07:55.553771 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
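Each "Node became not ready" entry is setters.go writing a Ready condition with status False, reason KubeletNotReady and the CNI message quoted above into the node status. A minimal sketch of building that same condition with the upstream API types the kubelet uses (k8s.io/api/core/v1 and k8s.io/apimachinery; the Go module setup is assumed) looks like this; it only constructs the value and prints it, it does not touch any node object.

package main

import (
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func main() {
	now := metav1.NewTime(time.Now())
	// Mirrors the condition JSON printed by setters.go in the log above.
	cond := corev1.NodeCondition{
		Type:               corev1.NodeReady,
		Status:             corev1.ConditionFalse,
		LastHeartbeatTime:  now,
		LastTransitionTime: now,
		Reason:             "KubeletNotReady",
		Message:            "container runtime network not ready: NetworkReady=false ...",
	}
	fmt.Printf("%+v\n", cond)
}

The lastHeartbeatTime and lastTransitionTime pair in the logged JSON corresponds directly to the two metav1.Time fields here.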
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.602989 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.603072 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.603097 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.603134 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.603158 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:55Z","lastTransitionTime":"2026-01-22T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.609870 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 19:03:13.287057 +0000 UTC Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.706072 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.706127 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.706147 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.706171 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.706188 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:55Z","lastTransitionTime":"2026-01-22T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.808574 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.808630 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.808644 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.808661 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.808672 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:55Z","lastTransitionTime":"2026-01-22T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.911094 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.911158 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.911177 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.911204 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:55 crc kubenswrapper[4829]: I0122 00:07:55.911221 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:55Z","lastTransitionTime":"2026-01-22T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.013617 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.013682 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.013700 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.013725 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.013743 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:56Z","lastTransitionTime":"2026-01-22T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.116817 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.117381 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.117406 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.117432 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.117449 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:56Z","lastTransitionTime":"2026-01-22T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.220293 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.220343 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.220353 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.220372 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.220385 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:56Z","lastTransitionTime":"2026-01-22T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.323599 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.323695 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.323714 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.323775 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.323793 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:56Z","lastTransitionTime":"2026-01-22T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.426760 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.426855 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.426877 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.426906 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.426930 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:56Z","lastTransitionTime":"2026-01-22T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.531135 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.531204 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.531224 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.531252 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.531284 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:56Z","lastTransitionTime":"2026-01-22T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.540593 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/74beaade-c8f6-4d34-842b-1c03fe72b195-metrics-certs\") pod \"network-metrics-daemon-c82dd\" (UID: \"74beaade-c8f6-4d34-842b-1c03fe72b195\") " pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:07:56 crc kubenswrapper[4829]: E0122 00:07:56.540900 4829 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 00:07:56 crc kubenswrapper[4829]: E0122 00:07:56.540976 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/74beaade-c8f6-4d34-842b-1c03fe72b195-metrics-certs podName:74beaade-c8f6-4d34-842b-1c03fe72b195 nodeName:}" failed. No retries permitted until 2026-01-22 00:08:12.540951499 +0000 UTC m=+70.577193451 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/74beaade-c8f6-4d34-842b-1c03fe72b195-metrics-certs") pod "network-metrics-daemon-c82dd" (UID: "74beaade-c8f6-4d34-842b-1c03fe72b195") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.552819 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:07:56 crc kubenswrapper[4829]: E0122 00:07:56.553024 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.553159 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.553328 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:07:56 crc kubenswrapper[4829]: E0122 00:07:56.553352 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:07:56 crc kubenswrapper[4829]: E0122 00:07:56.553729 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
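The nestedpendingoperations.go entry above blocks further mount attempts for the metrics-certs secret until 00:08:12, a durationBeforeRetry of 16s. That figure is consistent with a doubling exponential backoff; the sketch below shows only the generic shape, and the 500ms initial delay and 2-minute cap are assumptions for illustration, not values read from this log.

package main

import (
	"fmt"
	"time"
)

// nextDelay doubles the previous retry delay up to maxDelay, the classic
// exponential-backoff shape suggested by the durationBeforeRetry value above.
func nextDelay(prev, initial, maxDelay time.Duration) time.Duration {
	if prev == 0 {
		return initial
	}
	d := 2 * prev
	if d > maxDelay {
		return maxDelay
	}
	return d
}

func main() {
	var d time.Duration
	for attempt := 1; attempt <= 7; attempt++ {
		d = nextDelay(d, 500*time.Millisecond, 2*time.Minute)
		fmt.Printf("attempt %d: wait %v\n", attempt, d)
	}
}

With those assumed parameters the waits run 500ms, 1s, 2s, 4s, 8s, 16s, 32s, so a 16s window would correspond to roughly the sixth failed attempt for this volume.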
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.610876 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 12:28:37.944645535 +0000 UTC Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.634375 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.634601 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.634661 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.634757 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.634819 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:56Z","lastTransitionTime":"2026-01-22T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.737430 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.737506 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.737530 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.737590 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.737612 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:56Z","lastTransitionTime":"2026-01-22T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.840611 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.840899 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.841087 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.841256 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.841373 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:56Z","lastTransitionTime":"2026-01-22T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.946125 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.946199 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.946221 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.946251 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:56 crc kubenswrapper[4829]: I0122 00:07:56.946271 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:56Z","lastTransitionTime":"2026-01-22T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.048848 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.048892 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.048903 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.048921 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.048931 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:57Z","lastTransitionTime":"2026-01-22T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.151521 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.151609 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.151632 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.151661 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.151683 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:57Z","lastTransitionTime":"2026-01-22T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.254666 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.254731 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.254750 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.254776 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.254796 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:57Z","lastTransitionTime":"2026-01-22T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.356942 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.357035 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.357071 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.357115 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.357140 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:57Z","lastTransitionTime":"2026-01-22T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.459029 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.459066 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.459078 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.459096 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.459107 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:57Z","lastTransitionTime":"2026-01-22T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.552831 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:57 crc kubenswrapper[4829]: E0122 00:07:57.552999 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.561637 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.561768 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.561785 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.561807 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.561825 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:57Z","lastTransitionTime":"2026-01-22T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.611182 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 18:35:28.621796476 +0000 UTC Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.664620 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.664686 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.664709 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.664738 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.664759 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:57Z","lastTransitionTime":"2026-01-22T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.768199 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.768253 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.768265 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.768285 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.768297 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:57Z","lastTransitionTime":"2026-01-22T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.870628 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.871024 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.871235 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.871435 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.871667 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:57Z","lastTransitionTime":"2026-01-22T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.974922 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.974956 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.974967 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.974984 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:57 crc kubenswrapper[4829]: I0122 00:07:57.974995 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:57Z","lastTransitionTime":"2026-01-22T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.077796 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.077835 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.077849 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.077867 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.077879 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:58Z","lastTransitionTime":"2026-01-22T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.181374 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.181444 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.181469 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.181498 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.181517 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:58Z","lastTransitionTime":"2026-01-22T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.284726 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.284782 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.284794 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.284855 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.284872 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:58Z","lastTransitionTime":"2026-01-22T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.387892 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.387962 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.387984 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.388012 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.388034 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:58Z","lastTransitionTime":"2026-01-22T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.490349 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.490393 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.490405 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.490419 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.490428 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:58Z","lastTransitionTime":"2026-01-22T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.553003 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.553039 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:07:58 crc kubenswrapper[4829]: E0122 00:07:58.553610 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.553141 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:07:58 crc kubenswrapper[4829]: E0122 00:07:58.554325 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:07:58 crc kubenswrapper[4829]: E0122 00:07:58.554452 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
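The certificate_manager.go entries in this stretch (at 00:07:54.608, 00:07:55.609, 00:07:56.610, 00:07:57.611 and, just below, 00:07:58.612) all report the same kubelet-serving certificate expiration, 2026-02-24 05:53:03 UTC, yet a different rotation deadline each second, because the manager re-jitters the deadline within the certificate's lifetime. The sketch below imitates that kind of computation; the 70-90% window and the assumed notBefore are illustrative guesses, not values taken from the log.

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a jittered point late in the certificate's lifetime,
// in the spirit of client-go's certificate manager. The 0.7-0.9 window below
// is an assumption for illustration.
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	lifetime := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(lifetime) * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC) // expiration from the log
	notBefore := notAfter.AddDate(0, -1, 0)                   // assumed issue time, one month earlier
	for i := 0; i < 3; i++ {
		fmt.Println(rotationDeadline(notBefore, notAfter))
	}
}

All of the deadlines recorded above fall before the log's own timestamps, which is consistent with the manager repeatedly recomputing a deadline it has already passed while rotation cannot complete.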
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.554796 4829 scope.go:117] "RemoveContainer" containerID="1dabde54982cbc6363cf479705d3627fda28a1ffea67a85805fba20f47ee77a3" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.591982 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.592246 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.592381 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.592596 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.592758 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:58Z","lastTransitionTime":"2026-01-22T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.612006 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 07:16:18.937014956 +0000 UTC Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.700668 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.700740 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.700759 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.700790 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.700818 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:58Z","lastTransitionTime":"2026-01-22T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.804673 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.804732 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.804750 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.804773 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.804790 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:58Z","lastTransitionTime":"2026-01-22T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.915041 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.915089 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.915105 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.915129 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:58 crc kubenswrapper[4829]: I0122 00:07:58.915145 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:58Z","lastTransitionTime":"2026-01-22T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.018336 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.018383 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.018400 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.018424 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.018441 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:59Z","lastTransitionTime":"2026-01-22T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.118108 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fd6j8_7df1ca93-0e8f-4f06-8b8f-2297a8dbb340/ovnkube-controller/1.log" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.119879 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.119906 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.119919 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.119934 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.119946 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:59Z","lastTransitionTime":"2026-01-22T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.121719 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" event={"ID":"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340","Type":"ContainerStarted","Data":"54948fa627950cb8df7f9984fd67630b5ccf4f16de9d20b0076fee4cf34ef686"} Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.122134 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.147669 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37
932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:59Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.169849 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:59Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.189703 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:59Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.206439 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-22T00:07:59Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.222349 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.222383 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.222392 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.222407 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.222419 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:59Z","lastTransitionTime":"2026-01-22T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.229789 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54948fa627950cb8df7f9984fd67630b5ccf4f16
de9d20b0076fee4cf34ef686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dabde54982cbc6363cf479705d3627fda28a1ffea67a85805fba20f47ee77a3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:07:41Z\\\",\\\"message\\\":\\\"07449821398607916) with []\\\\nI0122 00:07:41.541915 6334 factory.go:1336] Added *v1.Node event handler 7\\\\nI0122 00:07:41.541953 6334 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0122 00:07:41.541960 6334 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 00:07:41.541980 6334 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 00:07:41.542013 6334 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 00:07:41.542066 6334 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 00:07:41.542220 6334 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0122 00:07:41.542251 6334 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 00:07:41.542302 6334 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 00:07:41.542310 6334 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0122 00:07:41.542324 6334 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 00:07:41.542340 6334 factory.go:656] Stopping watch factory\\\\nI0122 00:07:41.542355 6334 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 00:07:41.542362 6334 ovnkube.go:599] Stopped ovnkube\\\\nI0122 00:07:41.542397 6334 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0122 00:07:41.542472 6334 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:59Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.243954 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9d55693ac801f6179327e96684c7afbb592bb48d88d7491f442637557693fc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:59Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.273501 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:59Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.360656 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.360722 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.360746 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.360774 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.360789 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:59Z","lastTransitionTime":"2026-01-22T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.379990 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:59Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.396112 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:59Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.417441 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c82dd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74beaade-c8f6-4d34-842b-1c03fe72b195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c82dd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:59Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.437821 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"51b88d2f-3b05-42dd-85b5-eda696a7940f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42fb8905842fc9628adedb2e4d8f72324c1fa83cd086ff87ac599053d271742e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ebd2b2db90bf8261200aba8c0df5d07b321e2f4d6f428707c3d0716c0d684e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a949d7b4e46134e62601d299ac318fd21e3d1a46e9d66651e000cddbaf9732f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:59Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.463695 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.463754 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.463773 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.463797 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.463811 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:59Z","lastTransitionTime":"2026-01-22T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.464229 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:59Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.520042 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:59Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.540307 4829 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:59Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.553016 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:07:59 crc kubenswrapper[4829]: E0122 00:07:59.553125 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.560432 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\
\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:59Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.566194 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.566233 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.566248 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.566267 4829 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.566278 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:59Z","lastTransitionTime":"2026-01-22T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.578752 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799
488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:59Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.593561 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:59Z is after 2025-08-24T17:21:41Z" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.610346 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ea70a412-747d-42b1-bcee-db4479d6c229\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65417b8588477960ede9d8a15555b74822e2d5e3599f0725f7ef07f42d79215b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55764e49ae42fc92a2177278dd2db24901b22d3609b30b90650822c6972df4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8d59c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:07:59Z is after 2025-08-24T17:21:41Z" Jan 22 
00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.612456 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 11:15:50.204231941 +0000 UTC Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.668900 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.668943 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.668955 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.668972 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.668983 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:59Z","lastTransitionTime":"2026-01-22T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.771262 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.771667 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.771688 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.771711 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.771728 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:59Z","lastTransitionTime":"2026-01-22T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.874930 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.875002 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.875026 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.875057 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.875082 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:59Z","lastTransitionTime":"2026-01-22T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.978789 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.978843 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.978855 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.978872 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:07:59 crc kubenswrapper[4829]: I0122 00:07:59.978883 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:07:59Z","lastTransitionTime":"2026-01-22T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.082179 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.082239 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.082261 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.082300 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.082322 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:00Z","lastTransitionTime":"2026-01-22T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.126356 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fd6j8_7df1ca93-0e8f-4f06-8b8f-2297a8dbb340/ovnkube-controller/2.log" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.127044 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fd6j8_7df1ca93-0e8f-4f06-8b8f-2297a8dbb340/ovnkube-controller/1.log" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.130040 4829 generic.go:334] "Generic (PLEG): container finished" podID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerID="54948fa627950cb8df7f9984fd67630b5ccf4f16de9d20b0076fee4cf34ef686" exitCode=1 Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.130086 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" event={"ID":"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340","Type":"ContainerDied","Data":"54948fa627950cb8df7f9984fd67630b5ccf4f16de9d20b0076fee4cf34ef686"} Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.130120 4829 scope.go:117] "RemoveContainer" containerID="1dabde54982cbc6363cf479705d3627fda28a1ffea67a85805fba20f47ee77a3" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.132393 4829 scope.go:117] "RemoveContainer" containerID="54948fa627950cb8df7f9984fd67630b5ccf4f16de9d20b0076fee4cf34ef686" Jan 22 00:08:00 crc kubenswrapper[4829]: E0122 00:08:00.132857 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-fd6j8_openshift-ovn-kubernetes(7df1ca93-0e8f-4f06-8b8f-2297a8dbb340)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.146923 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.158770 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c82dd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74beaade-c8f6-4d34-842b-1c03fe72b195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c82dd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.173289 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.184988 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.185864 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.185984 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.185853 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"51b88d2f-3b05-42dd-85b5-eda696a7940f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42fb8905842fc9628adedb2e4d8f72324c1fa83cd086ff87ac599053d271742e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ebd2b2db90bf8261200aba8c0df5d07b321e2f4d6f428707c3d0716c0d684e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a949d7b4e46134e62601d299ac318fd21e3d1a46e9d66651e000cddbaf9732f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.186136 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.186309 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:00Z","lastTransitionTime":"2026-01-22T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.200079 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.209907 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.222808 4829 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.236741 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"k
ube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.250205 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.261101 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ea70a412-747d-42b1-bcee-db4479d6c229\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65417b8588477960ede9d8a15555b74822e2d5e3599f0725f7ef07f42d79215b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55764e49ae42fc92a2177278dd2db24901b22d3609b30b90650822c6972df4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8d59c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 22 
00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.277119 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.289430 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.289498 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.289522 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.289589 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.289628 4829 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:00Z","lastTransitionTime":"2026-01-22T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.294467 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.308072 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.318770 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-22T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.333382 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54948fa627950cb8df7f9984fd67630b5ccf4f16de9d20b0076fee4cf34ef686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1dabde54982cbc6363cf479705d3627fda28a1ffea67a85805fba20f47ee77a3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:07:41Z\\\",\\\"message\\\":\\\"07449821398607916) with []\\\\nI0122 00:07:41.541915 6334 factory.go:1336] Added *v1.Node event handler 7\\\\nI0122 00:07:41.541953 6334 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0122 00:07:41.541960 6334 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 00:07:41.541980 6334 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 00:07:41.542013 6334 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 00:07:41.542066 6334 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 00:07:41.542220 6334 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0122 00:07:41.542251 6334 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 00:07:41.542302 6334 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 00:07:41.542310 6334 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0122 00:07:41.542324 6334 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 00:07:41.542340 6334 factory.go:656] Stopping watch factory\\\\nI0122 00:07:41.542355 6334 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 00:07:41.542362 6334 ovnkube.go:599] Stopped ovnkube\\\\nI0122 00:07:41.542397 6334 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0122 00:07:41.542472 6334 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54948fa627950cb8df7f9984fd67630b5ccf4f16de9d20b0076fee4cf34ef686\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:07:59Z\\\",\\\"message\\\":\\\"o:160\\\\nI0122 00:07:59.663399 6522 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.663515 6522 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.663908 6522 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 00:07:59.664140 6522 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.664566 6522 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 00:07:59.664590 6522 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 00:07:59.664626 6522 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 00:07:59.664645 6522 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 00:07:59.664693 6522 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 00:07:59.664707 6522 factory.go:656] Stopping watch factory\\\\nI0122 00:07:59.664721 6522 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 00:07:59.664736 6522 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 00:07:59.664737 6522 handler.go:208] Removed *v1.EgressIP 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.350426 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9d55693ac801f6179327e96684c7afbb592bb48d88d7491f442637557693fc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containe
rID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeM
ounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.365067 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.392471 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.392580 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.392612 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.392647 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.392666 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:00Z","lastTransitionTime":"2026-01-22T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.406461 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37
932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.496360 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.496423 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.496447 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.496477 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.496604 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:00Z","lastTransitionTime":"2026-01-22T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.553336 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:00 crc kubenswrapper[4829]: E0122 00:08:00.553481 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.553779 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:00 crc kubenswrapper[4829]: E0122 00:08:00.553884 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.554105 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:00 crc kubenswrapper[4829]: E0122 00:08:00.554208 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.599454 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.599522 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.599577 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.599603 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.599620 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:00Z","lastTransitionTime":"2026-01-22T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.613202 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 21:37:55.108034756 +0000 UTC Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.702621 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.702673 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.702689 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.702710 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.702727 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:00Z","lastTransitionTime":"2026-01-22T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.805483 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.805516 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.805526 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.805554 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.805567 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:00Z","lastTransitionTime":"2026-01-22T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.908108 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.908144 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.908155 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.908171 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:00 crc kubenswrapper[4829]: I0122 00:08:00.908183 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:00Z","lastTransitionTime":"2026-01-22T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.011222 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.011266 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.011276 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.011292 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.011304 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:01Z","lastTransitionTime":"2026-01-22T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.113656 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.113714 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.113732 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.113756 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.113775 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:01Z","lastTransitionTime":"2026-01-22T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.134189 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fd6j8_7df1ca93-0e8f-4f06-8b8f-2297a8dbb340/ovnkube-controller/2.log" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.137088 4829 scope.go:117] "RemoveContainer" containerID="54948fa627950cb8df7f9984fd67630b5ccf4f16de9d20b0076fee4cf34ef686" Jan 22 00:08:01 crc kubenswrapper[4829]: E0122 00:08:01.137247 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-fd6j8_openshift-ovn-kubernetes(7df1ca93-0e8f-4f06-8b8f-2297a8dbb340)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.153792 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:01Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.164413 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c82dd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74beaade-c8f6-4d34-842b-1c03fe72b195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c82dd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:01Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.180471 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:01Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.197183 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"51b88d2f-3b05-42dd-85b5-eda696a7940f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42fb8905842fc9628adedb2e4d8f72324c1fa83cd086ff87ac599053d271742e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ebd2b2db90bf8261200aba8c0df5d07b321e2f4d6f428707c3d0716c0d684e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a949d7b4e46134e62601d299ac318fd21e3d1a46e9d66651e000cddbaf9732f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:01Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.210870 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:01Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.216717 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.216769 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.216787 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.216999 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.217019 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:01Z","lastTransitionTime":"2026-01-22T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.222943 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:01Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.236297 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:01Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.249199 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha
256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:01Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.263429 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:01Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.279767 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ea70a412-747d-42b1-bcee-db4479d6c229\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65417b8588477960ede9d8a15555b74822e2d5e3599f0725f7ef07f42d79215b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55764e49ae42fc92a2177278dd2db24901b22d3609b30b90650822c6972df4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8d59c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:01Z is after 2025-08-24T17:21:41Z" Jan 22 
00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.291972 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:01Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.303571 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:01Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.316818 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:01Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.319563 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.319610 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.319626 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.319648 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.319664 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:01Z","lastTransitionTime":"2026-01-22T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.332244 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:01Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.385369 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54948fa627950cb8df7f9984fd67630b5ccf4f16de9d20b0076fee4cf34ef686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54948fa627950cb8df7f9984fd67630b5ccf4f16de9d20b0076fee4cf34ef686\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:07:59Z\\\",\\\"message\\\":\\\"o:160\\\\nI0122 00:07:59.663399 6522 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.663515 6522 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.663908 6522 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 00:07:59.664140 6522 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.664566 6522 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 00:07:59.664590 6522 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 00:07:59.664626 6522 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 00:07:59.664645 6522 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 00:07:59.664693 6522 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 00:07:59.664707 6522 factory.go:656] Stopping watch factory\\\\nI0122 00:07:59.664721 6522 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 00:07:59.664736 6522 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 00:07:59.664737 6522 handler.go:208] Removed *v1.EgressIP ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fd6j8_openshift-ovn-kubernetes(7df1ca93-0e8f-4f06-8b8f-2297a8dbb340)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:01Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.399493 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9d55693ac801f6179327e96684c7afbb592bb48d88d7491f442637557693fc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:01Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.409278 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\
\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:01Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.419213 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.419243 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.419254 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.419270 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.419282 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:01Z","lastTransitionTime":"2026-01-22T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.426883 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37
932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:01Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:01 crc kubenswrapper[4829]: E0122 00:08:01.431879 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:01Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.434823 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.434924 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.434981 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.435044 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.435098 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:01Z","lastTransitionTime":"2026-01-22T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:01 crc kubenswrapper[4829]: E0122 00:08:01.445609 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:01Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.448700 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.448743 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.448756 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.448777 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.448792 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:01Z","lastTransitionTime":"2026-01-22T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:01 crc kubenswrapper[4829]: E0122 00:08:01.461999 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:01Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.465095 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.465192 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.465260 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.465327 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.465383 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:01Z","lastTransitionTime":"2026-01-22T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:01 crc kubenswrapper[4829]: E0122 00:08:01.478354 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:01Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.481377 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.481478 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.481537 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.481611 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.481688 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:01Z","lastTransitionTime":"2026-01-22T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:01 crc kubenswrapper[4829]: E0122 00:08:01.493737 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:01Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:01 crc kubenswrapper[4829]: E0122 00:08:01.493885 4829 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.495643 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.495680 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.495691 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.495707 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.495719 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:01Z","lastTransitionTime":"2026-01-22T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.552634 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:01 crc kubenswrapper[4829]: E0122 00:08:01.553082 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.598660 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.598691 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.598702 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.598716 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.598727 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:01Z","lastTransitionTime":"2026-01-22T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.613891 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 18:37:09.133283999 +0000 UTC Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.700929 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.701008 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.701024 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.701046 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.701066 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:01Z","lastTransitionTime":"2026-01-22T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.803348 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.803425 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.803449 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.803474 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.803493 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:01Z","lastTransitionTime":"2026-01-22T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.906518 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.906587 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.906599 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.906618 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:01 crc kubenswrapper[4829]: I0122 00:08:01.906630 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:01Z","lastTransitionTime":"2026-01-22T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.009128 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.009173 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.009188 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.009206 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.009218 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:02Z","lastTransitionTime":"2026-01-22T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.111633 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.111699 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.111717 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.111744 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.111765 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:02Z","lastTransitionTime":"2026-01-22T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.214366 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.214754 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.214899 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.215036 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.215301 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:02Z","lastTransitionTime":"2026-01-22T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.318669 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.318742 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.318761 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.318790 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.318814 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:02Z","lastTransitionTime":"2026-01-22T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.421636 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.421681 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.421695 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.421712 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.421724 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:02Z","lastTransitionTime":"2026-01-22T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.524259 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.524327 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.524350 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.524379 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.524396 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:02Z","lastTransitionTime":"2026-01-22T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.552632 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.552644 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.552717 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:02 crc kubenswrapper[4829]: E0122 00:08:02.552835 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:02 crc kubenswrapper[4829]: E0122 00:08:02.552918 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:02 crc kubenswrapper[4829]: E0122 00:08:02.553140 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.571046 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/k
ubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.593393 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.608708 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ea70a412-747d-42b1-bcee-db4479d6c229\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65417b8588477960ede9d8a15555b74822e2d5e3599f0725f7ef07f42d79215b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55764e49ae42fc92a2177278dd2db24901b22d3609b30b90650822c6972df4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8d59c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 22 
00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.614966 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 15:39:00.590799681 +0000 UTC Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.627165 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.627374 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.627455 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.627557 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.627667 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:02Z","lastTransitionTime":"2026-01-22T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.632371 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kub
e-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.657411 4829 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.673402 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.685085 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.705240 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54948fa627950cb8df7f9984fd67630b5ccf4f16
de9d20b0076fee4cf34ef686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54948fa627950cb8df7f9984fd67630b5ccf4f16de9d20b0076fee4cf34ef686\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:07:59Z\\\",\\\"message\\\":\\\"o:160\\\\nI0122 00:07:59.663399 6522 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.663515 6522 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.663908 6522 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 00:07:59.664140 6522 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.664566 6522 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 00:07:59.664590 6522 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 00:07:59.664626 6522 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 00:07:59.664645 6522 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 00:07:59.664693 6522 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 00:07:59.664707 6522 factory.go:656] Stopping watch factory\\\\nI0122 00:07:59.664721 6522 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 00:07:59.664736 6522 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 00:07:59.664737 6522 handler.go:208] Removed *v1.EgressIP ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fd6j8_openshift-ovn-kubernetes(7df1ca93-0e8f-4f06-8b8f-2297a8dbb340)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.728823 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9d55693ac801f6179327e96684c7afbb592bb48d88d7491f442637557693fc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.731625 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.731708 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.731736 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.731769 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.731793 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:02Z","lastTransitionTime":"2026-01-22T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.743907 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.768774 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37
932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.787332 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.800793 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c82dd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74beaade-c8f6-4d34-842b-1c03fe72b195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c82dd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.816675 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.833732 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"51b88d2f-3b05-42dd-85b5-eda696a7940f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42fb8905842fc9628adedb2e4d8f72324c1fa83cd086ff87ac599053d271742e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ebd2b2db90bf8261200aba8c0df5d07b321e2f4d6f428707c3d0716c0d684e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a949d7b4e46134e62601d299ac318fd21e3d1a46e9d66651e000cddbaf9732f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.836338 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.836398 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.836416 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.836440 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.836457 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:02Z","lastTransitionTime":"2026-01-22T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.854410 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.867497 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.881182 4829 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.939332 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.939371 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.939381 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.939398 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:02 crc kubenswrapper[4829]: I0122 00:08:02.939409 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:02Z","lastTransitionTime":"2026-01-22T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.042078 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.042122 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.042131 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.042147 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.042157 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:03Z","lastTransitionTime":"2026-01-22T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.143933 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.143969 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.143978 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.143992 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.144001 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:03Z","lastTransitionTime":"2026-01-22T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.246122 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.246165 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.246177 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.246196 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.246208 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:03Z","lastTransitionTime":"2026-01-22T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.348659 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.348692 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.348702 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.348718 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.348728 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:03Z","lastTransitionTime":"2026-01-22T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.451199 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.451250 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.451260 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.451275 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.451288 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:03Z","lastTransitionTime":"2026-01-22T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.552521 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:03 crc kubenswrapper[4829]: E0122 00:08:03.552719 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.554214 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.554285 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.554301 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.554328 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.554345 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:03Z","lastTransitionTime":"2026-01-22T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.616130 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 01:27:14.657684754 +0000 UTC Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.657457 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.657508 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.657521 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.657558 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.657572 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:03Z","lastTransitionTime":"2026-01-22T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.760054 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.760127 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.760154 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.760185 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.760205 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:03Z","lastTransitionTime":"2026-01-22T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.863527 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.863630 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.863649 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.863681 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.863704 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:03Z","lastTransitionTime":"2026-01-22T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.966192 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.966234 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.966244 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.966261 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:03 crc kubenswrapper[4829]: I0122 00:08:03.966273 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:03Z","lastTransitionTime":"2026-01-22T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.068387 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.068443 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.068461 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.068487 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.068506 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:04Z","lastTransitionTime":"2026-01-22T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.172325 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.172395 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.172415 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.172446 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.172474 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:04Z","lastTransitionTime":"2026-01-22T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.275083 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.275132 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.275155 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.275185 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.275207 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:04Z","lastTransitionTime":"2026-01-22T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.378335 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.378385 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.378401 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.378420 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.378434 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:04Z","lastTransitionTime":"2026-01-22T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.480966 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.481211 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.481523 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.481583 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.481604 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:04Z","lastTransitionTime":"2026-01-22T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.557355 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:04 crc kubenswrapper[4829]: E0122 00:08:04.557534 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.557687 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:04 crc kubenswrapper[4829]: E0122 00:08:04.557775 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.557842 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:04 crc kubenswrapper[4829]: E0122 00:08:04.557917 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.584380 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.584429 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.584447 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.584467 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.584484 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:04Z","lastTransitionTime":"2026-01-22T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.616251 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 17:46:33.801740214 +0000 UTC Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.686982 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.687040 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.687064 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.687088 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.687104 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:04Z","lastTransitionTime":"2026-01-22T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.789873 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.789900 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.789909 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.789923 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.789932 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:04Z","lastTransitionTime":"2026-01-22T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.892362 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.892389 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.892399 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.892414 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.892423 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:04Z","lastTransitionTime":"2026-01-22T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.995577 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.995677 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.995697 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.995719 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:04 crc kubenswrapper[4829]: I0122 00:08:04.995737 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:04Z","lastTransitionTime":"2026-01-22T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.098595 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.098666 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.098704 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.098734 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.098755 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:05Z","lastTransitionTime":"2026-01-22T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.202042 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.202086 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.202102 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.202126 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.202144 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:05Z","lastTransitionTime":"2026-01-22T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.305361 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.305743 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.305938 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.306127 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.306269 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:05Z","lastTransitionTime":"2026-01-22T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.409942 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.410002 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.410023 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.410050 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.410070 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:05Z","lastTransitionTime":"2026-01-22T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.513349 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.513756 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.513893 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.514043 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.514175 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:05Z","lastTransitionTime":"2026-01-22T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.553432 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:05 crc kubenswrapper[4829]: E0122 00:08:05.553659 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.616891 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 06:00:37.056359189 +0000 UTC Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.617525 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.617810 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.617835 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.617867 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.617889 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:05Z","lastTransitionTime":"2026-01-22T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.720134 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.720173 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.720184 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.720201 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.720213 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:05Z","lastTransitionTime":"2026-01-22T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.822816 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.822857 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.822868 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.822884 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.822908 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:05Z","lastTransitionTime":"2026-01-22T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.924927 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.924975 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.924988 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.925005 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:05 crc kubenswrapper[4829]: I0122 00:08:05.925020 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:05Z","lastTransitionTime":"2026-01-22T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.027877 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.027917 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.027926 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.027940 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.027950 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:06Z","lastTransitionTime":"2026-01-22T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.130242 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.130292 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.130305 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.130324 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.130335 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:06Z","lastTransitionTime":"2026-01-22T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.232927 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.232990 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.233006 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.233029 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.233046 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:06Z","lastTransitionTime":"2026-01-22T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.336021 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.336102 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.336124 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.336154 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.336177 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:06Z","lastTransitionTime":"2026-01-22T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.438562 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.438607 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.438617 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.438631 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.438640 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:06Z","lastTransitionTime":"2026-01-22T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.540583 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.540637 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.540653 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.540675 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.540688 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:06Z","lastTransitionTime":"2026-01-22T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.553205 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.553205 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.553201 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:06 crc kubenswrapper[4829]: E0122 00:08:06.553537 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:06 crc kubenswrapper[4829]: E0122 00:08:06.553632 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:06 crc kubenswrapper[4829]: E0122 00:08:06.553352 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.618474 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 02:27:40.681735719 +0000 UTC Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.643916 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.643983 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.644020 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.644048 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.644071 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:06Z","lastTransitionTime":"2026-01-22T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.747371 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.747429 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.747446 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.747469 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.747490 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:06Z","lastTransitionTime":"2026-01-22T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.849958 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.850019 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.850039 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.850062 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.850078 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:06Z","lastTransitionTime":"2026-01-22T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.952857 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.952966 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.952992 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.953027 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:06 crc kubenswrapper[4829]: I0122 00:08:06.953049 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:06Z","lastTransitionTime":"2026-01-22T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.056234 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.056304 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.056324 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.056353 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.056380 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:07Z","lastTransitionTime":"2026-01-22T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.159423 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.159485 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.159501 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.159523 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.159574 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:07Z","lastTransitionTime":"2026-01-22T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.262408 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.262455 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.262463 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.262479 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.262490 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:07Z","lastTransitionTime":"2026-01-22T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.364473 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.364522 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.364533 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.364605 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.364620 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:07Z","lastTransitionTime":"2026-01-22T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.467845 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.467940 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.467950 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.467964 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.467973 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:07Z","lastTransitionTime":"2026-01-22T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.553112 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:07 crc kubenswrapper[4829]: E0122 00:08:07.553320 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.570358 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.570407 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.570418 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.570435 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.570448 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:07Z","lastTransitionTime":"2026-01-22T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.619282 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 20:22:17.771725551 +0000 UTC Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.672797 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.672843 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.672860 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.672884 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.672901 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:07Z","lastTransitionTime":"2026-01-22T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.775308 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.775383 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.775405 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.775437 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.775461 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:07Z","lastTransitionTime":"2026-01-22T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.877871 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.877920 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.877932 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.877951 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.877963 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:07Z","lastTransitionTime":"2026-01-22T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.981353 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.981410 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.981426 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.981448 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:07 crc kubenswrapper[4829]: I0122 00:08:07.981461 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:07Z","lastTransitionTime":"2026-01-22T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.084311 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.084372 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.084389 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.084418 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.084435 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:08Z","lastTransitionTime":"2026-01-22T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.186496 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.186623 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.186656 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.186691 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.186712 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:08Z","lastTransitionTime":"2026-01-22T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.289376 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.289422 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.289432 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.289449 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.289459 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:08Z","lastTransitionTime":"2026-01-22T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.392893 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.392947 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.392960 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.392982 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.392996 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:08Z","lastTransitionTime":"2026-01-22T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.495776 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.495821 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.495834 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.495850 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.495861 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:08Z","lastTransitionTime":"2026-01-22T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.553610 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.553637 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:08 crc kubenswrapper[4829]: E0122 00:08:08.553786 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.553867 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:08 crc kubenswrapper[4829]: E0122 00:08:08.553928 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:08 crc kubenswrapper[4829]: E0122 00:08:08.554022 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.597829 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.597895 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.597914 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.597940 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.597960 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:08Z","lastTransitionTime":"2026-01-22T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.620225 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 17:09:47.915591424 +0000 UTC Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.700537 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.700636 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.700658 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.700685 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.700706 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:08Z","lastTransitionTime":"2026-01-22T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.803101 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.803321 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.803335 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.803354 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.803370 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:08Z","lastTransitionTime":"2026-01-22T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.905650 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.905695 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.905708 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.905727 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:08 crc kubenswrapper[4829]: I0122 00:08:08.905739 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:08Z","lastTransitionTime":"2026-01-22T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.007835 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.007899 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.007916 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.007941 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.007958 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:09Z","lastTransitionTime":"2026-01-22T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.110572 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.110618 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.110632 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.110680 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.110692 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:09Z","lastTransitionTime":"2026-01-22T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.214163 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.214247 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.214274 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.214310 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.214332 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:09Z","lastTransitionTime":"2026-01-22T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.317866 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.317923 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.317942 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.317966 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.317984 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:09Z","lastTransitionTime":"2026-01-22T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.420379 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.420435 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.420451 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.420474 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.420491 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:09Z","lastTransitionTime":"2026-01-22T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.522826 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.522891 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.522908 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.522937 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.522954 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:09Z","lastTransitionTime":"2026-01-22T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.553343 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:09 crc kubenswrapper[4829]: E0122 00:08:09.553497 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.620965 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 10:35:12.431950465 +0000 UTC Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.625825 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.625913 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.625932 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.625956 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.625973 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:09Z","lastTransitionTime":"2026-01-22T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.729502 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.729567 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.729580 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.729598 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.729609 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:09Z","lastTransitionTime":"2026-01-22T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.832350 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.832427 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.832445 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.832981 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.833057 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:09Z","lastTransitionTime":"2026-01-22T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.935859 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.935905 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.935917 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.935944 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:09 crc kubenswrapper[4829]: I0122 00:08:09.935957 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:09Z","lastTransitionTime":"2026-01-22T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.038495 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.038563 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.038578 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.038595 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.038606 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:10Z","lastTransitionTime":"2026-01-22T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.141005 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.141059 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.141076 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.141102 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.141118 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:10Z","lastTransitionTime":"2026-01-22T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.244037 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.244071 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.244079 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.244096 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.244106 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:10Z","lastTransitionTime":"2026-01-22T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.346104 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.346146 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.346154 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.346171 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.346180 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:10Z","lastTransitionTime":"2026-01-22T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.449358 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.449396 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.449406 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.449423 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.449435 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:10Z","lastTransitionTime":"2026-01-22T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.552397 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.552433 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.552441 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.552456 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.552466 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:10Z","lastTransitionTime":"2026-01-22T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.552484 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.552505 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.552581 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:10 crc kubenswrapper[4829]: E0122 00:08:10.552651 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:10 crc kubenswrapper[4829]: E0122 00:08:10.552772 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:10 crc kubenswrapper[4829]: E0122 00:08:10.552967 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.621570 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 00:44:11.01169461 +0000 UTC Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.655228 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.655434 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.655443 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.655458 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.655469 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:10Z","lastTransitionTime":"2026-01-22T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.757499 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.757553 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.757568 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.757584 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.757597 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:10Z","lastTransitionTime":"2026-01-22T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.860187 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.860224 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.860234 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.860248 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.860259 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:10Z","lastTransitionTime":"2026-01-22T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.962834 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.962892 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.962904 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.962965 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:10 crc kubenswrapper[4829]: I0122 00:08:10.962980 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:10Z","lastTransitionTime":"2026-01-22T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.064773 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.064812 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.064821 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.064834 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.064843 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:11Z","lastTransitionTime":"2026-01-22T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.167206 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.167244 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.167255 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.167272 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.167282 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:11Z","lastTransitionTime":"2026-01-22T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.269617 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.269682 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.269710 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.269735 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.269761 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:11Z","lastTransitionTime":"2026-01-22T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.372485 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.372564 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.372582 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.372606 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.372623 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:11Z","lastTransitionTime":"2026-01-22T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.474936 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.474994 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.475006 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.475025 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.475041 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:11Z","lastTransitionTime":"2026-01-22T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.552496 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:11 crc kubenswrapper[4829]: E0122 00:08:11.552643 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.577177 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.577212 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.577220 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.577236 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.577247 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:11Z","lastTransitionTime":"2026-01-22T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.622097 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 22:54:26.710500102 +0000 UTC Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.628239 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.628305 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.628331 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.628362 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.628385 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:11Z","lastTransitionTime":"2026-01-22T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:11 crc kubenswrapper[4829]: E0122 00:08:11.642694 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:11Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.645985 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.646021 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.646032 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.646051 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.646063 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:11Z","lastTransitionTime":"2026-01-22T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:11 crc kubenswrapper[4829]: E0122 00:08:11.656468 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:11Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.659791 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.659815 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.659823 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.659834 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.659842 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:11Z","lastTransitionTime":"2026-01-22T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:11 crc kubenswrapper[4829]: E0122 00:08:11.668925 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:11Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.672388 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.672422 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.672434 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.672451 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.672462 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:11Z","lastTransitionTime":"2026-01-22T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:11 crc kubenswrapper[4829]: E0122 00:08:11.686615 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:11Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.690235 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.690267 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.690281 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.690298 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.690312 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:11Z","lastTransitionTime":"2026-01-22T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:11 crc kubenswrapper[4829]: E0122 00:08:11.702229 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:11Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:11 crc kubenswrapper[4829]: E0122 00:08:11.702378 4829 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.703885 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.703921 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.703935 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.703955 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.703969 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:11Z","lastTransitionTime":"2026-01-22T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.806574 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.806614 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.806626 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.806642 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.806652 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:11Z","lastTransitionTime":"2026-01-22T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.908373 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.908413 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.908425 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.908442 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:11 crc kubenswrapper[4829]: I0122 00:08:11.908452 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:11Z","lastTransitionTime":"2026-01-22T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.010906 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.010930 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.010939 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.010957 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.010975 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:12Z","lastTransitionTime":"2026-01-22T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.112609 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.112644 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.112656 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.112672 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.112681 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:12Z","lastTransitionTime":"2026-01-22T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.214742 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.214944 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.214963 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.214992 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.215012 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:12Z","lastTransitionTime":"2026-01-22T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.317359 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.317463 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.317484 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.317511 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.317527 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:12Z","lastTransitionTime":"2026-01-22T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.420085 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.420124 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.420135 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.420152 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.420163 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:12Z","lastTransitionTime":"2026-01-22T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.522068 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.522116 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.522127 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.522146 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.522160 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:12Z","lastTransitionTime":"2026-01-22T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.552591 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:12 crc kubenswrapper[4829]: E0122 00:08:12.552704 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.552611 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.552591 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:12 crc kubenswrapper[4829]: E0122 00:08:12.552765 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:12 crc kubenswrapper[4829]: E0122 00:08:12.552891 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.563137 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:12Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.574791 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:12Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.585176 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c82dd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74beaade-c8f6-4d34-842b-1c03fe72b195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c82dd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:12Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.597086 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"51b88d2f-3b05-42dd-85b5-eda696a7940f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42fb8905842fc9628adedb2e4d8f72324c1fa83cd086ff87ac599053d271742e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ebd2b2db90bf8261200aba8c0df5d07b321e2f4d6f428707c3d0716c0d684e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a949d7b4e46134e62601d299ac318fd21e3d1a46e9d66651e000cddbaf9732f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:12Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.610504 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:12Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.615954 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/74beaade-c8f6-4d34-842b-1c03fe72b195-metrics-certs\") pod \"network-metrics-daemon-c82dd\" (UID: \"74beaade-c8f6-4d34-842b-1c03fe72b195\") " pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:12 crc kubenswrapper[4829]: E0122 00:08:12.616075 4829 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 00:08:12 crc kubenswrapper[4829]: E0122 00:08:12.616119 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/74beaade-c8f6-4d34-842b-1c03fe72b195-metrics-certs podName:74beaade-c8f6-4d34-842b-1c03fe72b195 nodeName:}" failed. No retries permitted until 2026-01-22 00:08:44.616106057 +0000 UTC m=+102.652347969 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/74beaade-c8f6-4d34-842b-1c03fe72b195-metrics-certs") pod "network-metrics-daemon-c82dd" (UID: "74beaade-c8f6-4d34-842b-1c03fe72b195") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.622598 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 17:03:32.660536293 +0000 UTC Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.622637 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\
\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:12Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.623961 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.623989 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.624000 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.624015 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.624025 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:12Z","lastTransitionTime":"2026-01-22T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.635886 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:12Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.652004 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sh
a256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:12Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.663653 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:12Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.674891 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:12Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.684437 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ea70a412-747d-42b1-bcee-db4479d6c229\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65417b8588477960ede9d8a15555b74822e2d5e3599f0725f7ef07f42d79215b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55764e49ae42fc92a2177278dd2db24901b22d3609b30b90650822c6972df4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:39Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8d59c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:12Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.693466 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:12Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.722376 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37
932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:12Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.726352 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.726394 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.726409 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.726427 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.726439 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:12Z","lastTransitionTime":"2026-01-22T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.740554 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:12Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.756284 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:12Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.765623 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:12Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.785524 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54948fa627950cb8df7f9984fd67630b5ccf4f16
de9d20b0076fee4cf34ef686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54948fa627950cb8df7f9984fd67630b5ccf4f16de9d20b0076fee4cf34ef686\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:07:59Z\\\",\\\"message\\\":\\\"o:160\\\\nI0122 00:07:59.663399 6522 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.663515 6522 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.663908 6522 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 00:07:59.664140 6522 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.664566 6522 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 00:07:59.664590 6522 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 00:07:59.664626 6522 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 00:07:59.664645 6522 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 00:07:59.664693 6522 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 00:07:59.664707 6522 factory.go:656] Stopping watch factory\\\\nI0122 00:07:59.664721 6522 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 00:07:59.664736 6522 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 00:07:59.664737 6522 handler.go:208] Removed *v1.EgressIP ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fd6j8_openshift-ovn-kubernetes(7df1ca93-0e8f-4f06-8b8f-2297a8dbb340)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:12Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.800499 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9d55693ac801f6179327e96684c7afbb592bb48d88d7491f442637557693fc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:12Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.828023 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.828074 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.828085 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.828099 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.828107 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:12Z","lastTransitionTime":"2026-01-22T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.929922 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.929982 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.929996 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.930016 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:12 crc kubenswrapper[4829]: I0122 00:08:12.930029 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:12Z","lastTransitionTime":"2026-01-22T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.032618 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.032689 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.032712 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.032743 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.032765 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:13Z","lastTransitionTime":"2026-01-22T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.136998 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.137090 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.137107 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.137177 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.137198 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:13Z","lastTransitionTime":"2026-01-22T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.184026 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4ss4n_60f879f6-8b21-4e75-9a62-d372fec048e1/kube-multus/0.log" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.184079 4829 generic.go:334] "Generic (PLEG): container finished" podID="60f879f6-8b21-4e75-9a62-d372fec048e1" containerID="4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f" exitCode=1 Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.184105 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4ss4n" event={"ID":"60f879f6-8b21-4e75-9a62-d372fec048e1","Type":"ContainerDied","Data":"4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f"} Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.184452 4829 scope.go:117] "RemoveContainer" containerID="4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.203947 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":
{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:13Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.219180 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:13Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.232765 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:13Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.240052 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.240102 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.240114 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.240132 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.240145 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:13Z","lastTransitionTime":"2026-01-22T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.242830 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ea70a412-747d-42b1-bcee-db4479d6c229\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65417b8588477960ede9d8a15555b74822e2d5e3599f0725f7ef07f42d79215b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55764e49ae42fc92a2177278dd2db24901b22d3609b30b906
50822c6972df4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8d59c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:13Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.262089 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9d55693ac801f6179327e96684c7afbb592bb48d88d7491f442637557693fc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126
.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T0
0:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e
6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:13Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.274086 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"p
hase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:13Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.293069 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"qu
ay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c
6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:13Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.311040 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:13Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.327213 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:13Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.338762 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-22T00:08:13Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.341758 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.341785 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.341793 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.341806 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.341816 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:13Z","lastTransitionTime":"2026-01-22T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.357006 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54948fa627950cb8df7f9984fd67630b5ccf4f16
de9d20b0076fee4cf34ef686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54948fa627950cb8df7f9984fd67630b5ccf4f16de9d20b0076fee4cf34ef686\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:07:59Z\\\",\\\"message\\\":\\\"o:160\\\\nI0122 00:07:59.663399 6522 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.663515 6522 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.663908 6522 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 00:07:59.664140 6522 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.664566 6522 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 00:07:59.664590 6522 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 00:07:59.664626 6522 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 00:07:59.664645 6522 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 00:07:59.664693 6522 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 00:07:59.664707 6522 factory.go:656] Stopping watch factory\\\\nI0122 00:07:59.664721 6522 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 00:07:59.664736 6522 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 00:07:59.664737 6522 handler.go:208] Removed *v1.EgressIP ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fd6j8_openshift-ovn-kubernetes(7df1ca93-0e8f-4f06-8b8f-2297a8dbb340)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:13Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.371484 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed 
to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:13Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.385554 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:13Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.397289 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c82dd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74beaade-c8f6-4d34-842b-1c03fe72b195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c82dd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:13Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.409342 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"51b88d2f-3b05-42dd-85b5-eda696a7940f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42fb8905842fc9628adedb2e4d8f72324c1fa83cd086ff87ac599053d271742e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ebd2b2db90bf8261200aba8c0df5d07b321e2f4d6f428707c3d0716c0d684e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a949d7b4e46134e62601d299ac318fd21e3d1a46e9d66651e000cddbaf9732f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:13Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.421277 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:13Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.433508 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\
"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:13Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.444447 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.444496 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.444514 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.444556 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.444574 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:13Z","lastTransitionTime":"2026-01-22T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.447189 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:13Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:08:13Z\\\",\\\"message\\\":\\\"2026-01-22T00:07:27+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3cf3d1b5-252c-460a-8e5c-1bd10d71c877\\\\n2026-01-22T00:07:27+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3cf3d1b5-252c-460a-8e5c-1bd10d71c877 to /host/opt/cni/bin/\\\\n2026-01-22T00:07:28Z [verbose] multus-daemon started\\\\n2026-01-22T00:07:28Z [verbose] Readiness Indicator file check\\\\n2026-01-22T00:08:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:13Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.640431 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 23:01:43.239939169 +0000 UTC Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.640744 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.640760 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:13 crc kubenswrapper[4829]: E0122 00:08:13.640904 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:13 crc kubenswrapper[4829]: E0122 00:08:13.641041 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.642037 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.642063 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.642071 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.642083 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.642093 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:13Z","lastTransitionTime":"2026-01-22T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.744692 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.744743 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.744751 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.744766 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.744774 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:13Z","lastTransitionTime":"2026-01-22T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.847113 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.847155 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.847172 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.847194 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.847211 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:13Z","lastTransitionTime":"2026-01-22T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.950450 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.950502 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.950519 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.950567 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:13 crc kubenswrapper[4829]: I0122 00:08:13.950585 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:13Z","lastTransitionTime":"2026-01-22T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.053040 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.053089 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.053107 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.053130 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.053146 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:14Z","lastTransitionTime":"2026-01-22T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.156040 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.156087 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.156103 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.156125 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.156144 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:14Z","lastTransitionTime":"2026-01-22T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.189934 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4ss4n_60f879f6-8b21-4e75-9a62-d372fec048e1/kube-multus/0.log" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.190003 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4ss4n" event={"ID":"60f879f6-8b21-4e75-9a62-d372fec048e1","Type":"ContainerStarted","Data":"83e98a4d32040f6ba77c2c30a1833ff63b851a4aac473f7367192aa2fdc68a4c"} Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.204813 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.217965 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.239166 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ea70a412-747d-42b1-bcee-db4479d6c229\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65417b8588477960ede9d8a15555b74822e2d5e3599f0725f7ef07f42d79215b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55764e49ae42fc92a2177278dd2db24901b22d3609b30b90650822c6972df4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:39Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8d59c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.255992 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastStat
e\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.258663 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.258712 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.258724 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.258742 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.258755 4829 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:14Z","lastTransitionTime":"2026-01-22T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.273614 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.293263 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.307007 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-22T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.328430 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54948fa627950cb8df7f9984fd67630b5ccf4f16de9d20b0076fee4cf34ef686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54948fa627950cb8df7f9984fd67630b5ccf4f16de9d20b0076fee4cf34ef686\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:07:59Z\\\",\\\"message\\\":\\\"o:160\\\\nI0122 00:07:59.663399 6522 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.663515 6522 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.663908 6522 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 00:07:59.664140 6522 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.664566 6522 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 00:07:59.664590 6522 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 00:07:59.664626 6522 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 00:07:59.664645 6522 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 00:07:59.664693 6522 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 00:07:59.664707 6522 factory.go:656] Stopping watch factory\\\\nI0122 00:07:59.664721 6522 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 00:07:59.664736 6522 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 00:07:59.664737 6522 handler.go:208] Removed *v1.EgressIP ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fd6j8_openshift-ovn-kubernetes(7df1ca93-0e8f-4f06-8b8f-2297a8dbb340)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.352317 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9d55693ac801f6179327e96684c7afbb592bb48d88d7491f442637557693fc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.360722 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.360777 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.360795 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.360833 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.360851 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:14Z","lastTransitionTime":"2026-01-22T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.366981 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.390748 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37
932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.404050 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.414882 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c82dd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74beaade-c8f6-4d34-842b-1c03fe72b195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c82dd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.429446 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.443648 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"51b88d2f-3b05-42dd-85b5-eda696a7940f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42fb8905842fc9628adedb2e4d8f72324c1fa83cd086ff87ac599053d271742e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ebd2b2db90bf8261200aba8c0df5d07b321e2f4d6f428707c3d0716c0d684e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a949d7b4e46134e62601d299ac318fd21e3d1a46e9d66651e000cddbaf9732f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.459657 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.462965 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.463000 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.463010 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.463024 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.463035 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:14Z","lastTransitionTime":"2026-01-22T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.474179 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.488623 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83e98a4d32040f6ba77c2c30a1833ff63b851a4aac473f7367192aa2fdc68a4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:08:13Z\\\",\\\"message\\\":\\\"2026-01-22T00:07:27+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3cf3d1b5-252c-460a-8e5c-1bd10d71c877\\\\n2026-01-22T00:07:27+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3cf3d1b5-252c-460a-8e5c-1bd10d71c877 to /host/opt/cni/bin/\\\\n2026-01-22T00:07:28Z [verbose] multus-daemon started\\\\n2026-01-22T00:07:28Z [verbose] Readiness Indicator file check\\\\n2026-01-22T00:08:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:14Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.554791 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.554791 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:14 crc kubenswrapper[4829]: E0122 00:08:14.554913 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:14 crc kubenswrapper[4829]: E0122 00:08:14.555068 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.564794 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.564835 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.564844 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.564860 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.564869 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:14Z","lastTransitionTime":"2026-01-22T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.641213 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 19:35:00.436607709 +0000 UTC Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.667648 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.667704 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.667718 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.667741 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.667754 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:14Z","lastTransitionTime":"2026-01-22T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.770096 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.770164 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.770186 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.770215 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.770238 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:14Z","lastTransitionTime":"2026-01-22T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.872685 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.872725 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.872737 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.872753 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.872765 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:14Z","lastTransitionTime":"2026-01-22T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.975211 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.975276 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.975295 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.975319 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:14 crc kubenswrapper[4829]: I0122 00:08:14.975338 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:14Z","lastTransitionTime":"2026-01-22T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.077822 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.077867 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.077879 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.077897 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.077909 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:15Z","lastTransitionTime":"2026-01-22T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.179774 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.179823 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.179835 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.179851 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.179862 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:15Z","lastTransitionTime":"2026-01-22T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.281842 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.281892 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.281901 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.281918 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.281927 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:15Z","lastTransitionTime":"2026-01-22T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.383722 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.383757 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.383766 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.383780 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.383789 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:15Z","lastTransitionTime":"2026-01-22T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.485671 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.485709 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.485721 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.485738 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.485751 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:15Z","lastTransitionTime":"2026-01-22T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.553150 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.553179 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:15 crc kubenswrapper[4829]: E0122 00:08:15.553339 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:15 crc kubenswrapper[4829]: E0122 00:08:15.553810 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.554010 4829 scope.go:117] "RemoveContainer" containerID="54948fa627950cb8df7f9984fd67630b5ccf4f16de9d20b0076fee4cf34ef686" Jan 22 00:08:15 crc kubenswrapper[4829]: E0122 00:08:15.554164 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-fd6j8_openshift-ovn-kubernetes(7df1ca93-0e8f-4f06-8b8f-2297a8dbb340)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.588239 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.588279 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.588288 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.588303 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.588313 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:15Z","lastTransitionTime":"2026-01-22T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.641452 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 13:11:34.362265947 +0000 UTC Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.689867 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.689900 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.689908 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.689923 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.689934 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:15Z","lastTransitionTime":"2026-01-22T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.792060 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.792096 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.792105 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.792121 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.792131 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:15Z","lastTransitionTime":"2026-01-22T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.894765 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.894795 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.894805 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.894819 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.894828 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:15Z","lastTransitionTime":"2026-01-22T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.997491 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.997569 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.997604 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.997623 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:15 crc kubenswrapper[4829]: I0122 00:08:15.997636 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:15Z","lastTransitionTime":"2026-01-22T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.100110 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.100177 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.100197 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.100211 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.100220 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:16Z","lastTransitionTime":"2026-01-22T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.202841 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.202870 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.202878 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.202892 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.202901 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:16Z","lastTransitionTime":"2026-01-22T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.305131 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.305176 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.305186 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.305202 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.305211 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:16Z","lastTransitionTime":"2026-01-22T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.407750 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.407797 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.407809 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.407827 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.407839 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:16Z","lastTransitionTime":"2026-01-22T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.510667 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.510708 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.510723 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.510746 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.510762 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:16Z","lastTransitionTime":"2026-01-22T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.553009 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.553266 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:16 crc kubenswrapper[4829]: E0122 00:08:16.553471 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:16 crc kubenswrapper[4829]: E0122 00:08:16.553708 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.613772 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.613804 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.613813 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.613827 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.613836 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:16Z","lastTransitionTime":"2026-01-22T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.642401 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 01:03:02.879332352 +0000 UTC Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.716513 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.716564 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.716576 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.716593 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.716604 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:16Z","lastTransitionTime":"2026-01-22T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.820166 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.820205 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.820213 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.820229 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.820240 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:16Z","lastTransitionTime":"2026-01-22T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.922341 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.922375 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.922386 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.922401 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:16 crc kubenswrapper[4829]: I0122 00:08:16.922411 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:16Z","lastTransitionTime":"2026-01-22T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.025617 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.025691 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.025708 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.025731 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.025749 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:17Z","lastTransitionTime":"2026-01-22T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.127907 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.127961 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.127978 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.128001 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.128017 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:17Z","lastTransitionTime":"2026-01-22T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.231089 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.231154 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.231172 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.231202 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.231219 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:17Z","lastTransitionTime":"2026-01-22T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.333865 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.333976 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.333998 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.334022 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.334038 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:17Z","lastTransitionTime":"2026-01-22T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.437052 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.437115 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.437131 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.437157 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.437175 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:17Z","lastTransitionTime":"2026-01-22T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.540084 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.540122 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.540133 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.540152 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.540164 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:17Z","lastTransitionTime":"2026-01-22T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.553169 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:17 crc kubenswrapper[4829]: E0122 00:08:17.553333 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.553167 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:17 crc kubenswrapper[4829]: E0122 00:08:17.553582 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.642824 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 10:35:32.848683512 +0000 UTC Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.643591 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.643666 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.643685 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.643709 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.643728 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:17Z","lastTransitionTime":"2026-01-22T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.746826 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.746881 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.746898 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.746923 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.746940 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:17Z","lastTransitionTime":"2026-01-22T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.850255 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.850282 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.850291 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.850304 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.850312 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:17Z","lastTransitionTime":"2026-01-22T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.953909 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.953974 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.953994 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.954017 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:17 crc kubenswrapper[4829]: I0122 00:08:17.954058 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:17Z","lastTransitionTime":"2026-01-22T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.057210 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.057275 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.057296 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.057322 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.057341 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:18Z","lastTransitionTime":"2026-01-22T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.160272 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.160358 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.160383 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.160414 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.160436 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:18Z","lastTransitionTime":"2026-01-22T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.264621 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.264653 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.264665 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.264681 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.264692 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:18Z","lastTransitionTime":"2026-01-22T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.367749 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.367823 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.367841 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.367866 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.367883 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:18Z","lastTransitionTime":"2026-01-22T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.470669 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.470714 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.470723 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.470739 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.470748 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:18Z","lastTransitionTime":"2026-01-22T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.553000 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.553112 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:18 crc kubenswrapper[4829]: E0122 00:08:18.553245 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:18 crc kubenswrapper[4829]: E0122 00:08:18.553372 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.573656 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.574050 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.574078 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.574110 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.574134 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:18Z","lastTransitionTime":"2026-01-22T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.643191 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 19:31:49.20344515 +0000 UTC Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.676433 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.676498 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.676518 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.676570 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.676590 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:18Z","lastTransitionTime":"2026-01-22T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.780225 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.780311 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.780335 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.780363 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.780383 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:18Z","lastTransitionTime":"2026-01-22T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.883220 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.883287 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.883306 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.883329 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.883345 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:18Z","lastTransitionTime":"2026-01-22T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.986873 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.986946 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.986970 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.987013 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:18 crc kubenswrapper[4829]: I0122 00:08:18.987031 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:18Z","lastTransitionTime":"2026-01-22T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.090973 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.091050 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.091073 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.091119 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.091168 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:19Z","lastTransitionTime":"2026-01-22T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.194419 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.194491 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.194517 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.194580 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.194606 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:19Z","lastTransitionTime":"2026-01-22T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.298026 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.298101 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.298121 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.298160 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.298178 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:19Z","lastTransitionTime":"2026-01-22T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.401430 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.401495 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.401512 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.401537 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.401603 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:19Z","lastTransitionTime":"2026-01-22T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.505144 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.505206 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.505223 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.505259 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.505277 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:19Z","lastTransitionTime":"2026-01-22T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.553329 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.553476 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:19 crc kubenswrapper[4829]: E0122 00:08:19.553619 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:19 crc kubenswrapper[4829]: E0122 00:08:19.553666 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.608439 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.608484 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.608493 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.608509 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.608519 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:19Z","lastTransitionTime":"2026-01-22T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.644087 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 22:08:34.509963641 +0000 UTC Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.711715 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.711748 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.711758 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.711775 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.711787 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:19Z","lastTransitionTime":"2026-01-22T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.815665 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.815732 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.815749 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.815777 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.815801 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:19Z","lastTransitionTime":"2026-01-22T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.919405 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.919477 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.919495 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.919523 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:19 crc kubenswrapper[4829]: I0122 00:08:19.919587 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:19Z","lastTransitionTime":"2026-01-22T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.023533 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.023632 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.023650 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.023674 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.023690 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:20Z","lastTransitionTime":"2026-01-22T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.126756 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.126820 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.126840 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.126865 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.126883 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:20Z","lastTransitionTime":"2026-01-22T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.229990 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.230054 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.230072 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.230097 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.230114 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:20Z","lastTransitionTime":"2026-01-22T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.333095 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.333151 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.333170 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.333196 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.333215 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:20Z","lastTransitionTime":"2026-01-22T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.436858 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.436900 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.436910 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.436930 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.436943 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:20Z","lastTransitionTime":"2026-01-22T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.539914 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.540024 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.540075 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.540105 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.540124 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:20Z","lastTransitionTime":"2026-01-22T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.553179 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.553207 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:20 crc kubenswrapper[4829]: E0122 00:08:20.553368 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:20 crc kubenswrapper[4829]: E0122 00:08:20.553492 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.642656 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.642702 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.642714 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.642733 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.642747 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:20Z","lastTransitionTime":"2026-01-22T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.645110 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 03:29:59.72413426 +0000 UTC Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.745974 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.746038 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.746055 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.746080 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.746097 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:20Z","lastTransitionTime":"2026-01-22T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.848528 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.848607 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.848626 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.848648 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.848666 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:20Z","lastTransitionTime":"2026-01-22T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.951083 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.951120 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.951134 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.951153 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:20 crc kubenswrapper[4829]: I0122 00:08:20.951167 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:20Z","lastTransitionTime":"2026-01-22T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.053283 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.053652 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.053846 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.054075 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.054199 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:21Z","lastTransitionTime":"2026-01-22T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.156794 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.156833 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.156843 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.156874 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.156886 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:21Z","lastTransitionTime":"2026-01-22T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.259385 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.259710 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.259802 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.259936 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.260016 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:21Z","lastTransitionTime":"2026-01-22T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.362359 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.362420 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.362437 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.362462 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.362486 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:21Z","lastTransitionTime":"2026-01-22T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.466385 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.466449 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.466462 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.466481 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.466494 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:21Z","lastTransitionTime":"2026-01-22T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.553282 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:21 crc kubenswrapper[4829]: E0122 00:08:21.553515 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.553577 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:21 crc kubenswrapper[4829]: E0122 00:08:21.553757 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.569734 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.569787 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.569803 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.569870 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.569890 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:21Z","lastTransitionTime":"2026-01-22T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.645690 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 09:11:47.325386127 +0000 UTC Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.672020 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.672063 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.672079 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.672104 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.672119 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:21Z","lastTransitionTime":"2026-01-22T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.774510 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.774599 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.774617 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.774640 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.774657 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:21Z","lastTransitionTime":"2026-01-22T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.792720 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.792753 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.792764 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.792777 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.792791 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:21Z","lastTransitionTime":"2026-01-22T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:21 crc kubenswrapper[4829]: E0122 00:08:21.813477 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:21Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.818261 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.818315 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.818329 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.818350 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.818365 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:21Z","lastTransitionTime":"2026-01-22T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:21 crc kubenswrapper[4829]: E0122 00:08:21.835310 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:21Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.839829 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.839982 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.840076 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.840203 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.840305 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:21Z","lastTransitionTime":"2026-01-22T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:21 crc kubenswrapper[4829]: E0122 00:08:21.859459 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:21Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.864512 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.864597 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.864669 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.864697 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.864716 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:21Z","lastTransitionTime":"2026-01-22T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:21 crc kubenswrapper[4829]: E0122 00:08:21.882395 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:21Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.887297 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.887348 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.887365 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.887389 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.887406 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:21Z","lastTransitionTime":"2026-01-22T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:21 crc kubenswrapper[4829]: E0122 00:08:21.907605 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:21Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:21 crc kubenswrapper[4829]: E0122 00:08:21.907868 4829 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.909881 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.909921 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.909936 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.909955 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:21 crc kubenswrapper[4829]: I0122 00:08:21.909971 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:21Z","lastTransitionTime":"2026-01-22T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.012349 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.012421 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.012443 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.012473 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.012495 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:22Z","lastTransitionTime":"2026-01-22T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.155780 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.155833 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.155849 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.155873 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.155893 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:22Z","lastTransitionTime":"2026-01-22T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.259056 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.259116 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.259132 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.259158 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.259177 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:22Z","lastTransitionTime":"2026-01-22T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.380740 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.380811 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.380829 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.380856 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.380873 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:22Z","lastTransitionTime":"2026-01-22T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.483791 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.483844 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.483862 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.483890 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.483906 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:22Z","lastTransitionTime":"2026-01-22T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.556281 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.556320 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:22 crc kubenswrapper[4829]: E0122 00:08:22.556442 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:22 crc kubenswrapper[4829]: E0122 00:08:22.557155 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.574394 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.589454 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.589634 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.589653 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.589792 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.590247 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:22Z","lastTransitionTime":"2026-01-22T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.592525 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.609227 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c82dd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74beaade-c8f6-4d34-842b-1c03fe72b195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c82dd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.629424 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83e98a4d32040f6ba77c2c30a1833ff63b851a4aac473f7367192aa2fdc68a4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:08:13Z\\\",\\\"message\\\":\\\"2026-01-22T00:07:27+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3cf3d1b5-252c-460a-8e5c-1bd10d71c877\\\\n2026-01-22T00:07:27+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3cf3d1b5-252c-460a-8e5c-1bd10d71c877 to /host/opt/cni/bin/\\\\n2026-01-22T00:07:28Z [verbose] multus-daemon started\\\\n2026-01-22T00:07:28Z [verbose] Readiness Indicator file check\\\\n2026-01-22T00:08:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.646311 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 09:25:54.689784731 +0000 UTC Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.646773 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"51b88d2f-3b05-42dd-85b5-eda696a7940f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42fb8905842fc9628adedb2e4d8f72324c1fa83cd086ff87ac599053d271742e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ebd2b2db90bf8261200aba8c0df5d07b321e2f4d6f428707c3d0716c0d684e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a949d7b4e46134e62601d299ac318fd21e3d1a46e9d66651e000cddbaf9732f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.660647 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.678666 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\
"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.697078 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regenerati
on-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' 
detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.697304 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.698712 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.698757 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.698790 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.698811 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:22Z","lastTransitionTime":"2026-01-22T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.714145 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/cr
cont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.730153 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.747662 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ea70a412-747d-42b1-bcee-db4479d6c229\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65417b8588477960ede9d8a15555b74822e2d5e3599f0725f7ef07f42d79215b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55764e49ae42fc92a2177278dd2db24901b22d3609b30b90650822c6972df4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8d59c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:22Z is after 2025-08-24T17:21:41Z" Jan 22 
00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.775416 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a
4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://54948fa627950cb8df7f9984fd67630b5ccf4f16de9d20b0076fee4cf34ef686\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54948fa627950cb8df7f9984fd67630b5ccf4f16de9d20b0076fee4cf34ef686\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:07:59Z\\\",\\\"message\\\":\\\"o:160\\\\nI0122 00:07:59.663399 6522 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.663515 6522 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.663908 6522 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 00:07:59.664140 6522 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.664566 6522 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 00:07:59.664590 6522 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 00:07:59.664626 6522 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 00:07:59.664645 6522 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 00:07:59.664693 6522 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 00:07:59.664707 6522 factory.go:656] Stopping watch factory\\\\nI0122 00:07:59.664721 6522 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 00:07:59.664736 6522 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 00:07:59.664737 6522 handler.go:208] Removed *v1.EgressIP ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fd6j8_openshift-ovn-kubernetes(7df1ca93-0e8f-4f06-8b8f-2297a8dbb340)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.795425 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9d55693ac801f6179327e96684c7afbb592bb48d88d7491f442637557693fc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.802595 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.802678 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.802704 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.802735 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.802754 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:22Z","lastTransitionTime":"2026-01-22T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.810559 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.843232 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37
932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.858567 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.873043 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.883955 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-22T00:08:22Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.904689 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.904711 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.904719 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.904733 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:22 crc kubenswrapper[4829]: I0122 00:08:22.904741 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:22Z","lastTransitionTime":"2026-01-22T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.007645 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.007707 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.007731 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.007760 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.007781 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:23Z","lastTransitionTime":"2026-01-22T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.109754 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.109817 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.109838 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.109865 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.109913 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:23Z","lastTransitionTime":"2026-01-22T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.214074 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.214121 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.214139 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.214162 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.214178 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:23Z","lastTransitionTime":"2026-01-22T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.317231 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.317305 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.317327 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.317354 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.317372 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:23Z","lastTransitionTime":"2026-01-22T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.419244 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.419332 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.419357 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.419389 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.419414 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:23Z","lastTransitionTime":"2026-01-22T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.566644 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.566704 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:23 crc kubenswrapper[4829]: E0122 00:08:23.566820 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:23 crc kubenswrapper[4829]: E0122 00:08:23.567116 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.568672 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.568711 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.568724 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.568741 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.568758 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:23Z","lastTransitionTime":"2026-01-22T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.646685 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 04:59:59.528254311 +0000 UTC Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.670592 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.670626 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.670635 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.670651 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.670661 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:23Z","lastTransitionTime":"2026-01-22T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.773779 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.773987 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.774008 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.774027 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.774055 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:23Z","lastTransitionTime":"2026-01-22T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.877141 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.877189 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.877204 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.877224 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.877239 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:23Z","lastTransitionTime":"2026-01-22T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.980095 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.980189 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.980222 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.980266 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:23 crc kubenswrapper[4829]: I0122 00:08:23.980292 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:23Z","lastTransitionTime":"2026-01-22T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.083454 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.083524 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.083578 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.083612 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.083637 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:24Z","lastTransitionTime":"2026-01-22T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.187258 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.187338 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.187369 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.187401 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.187420 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:24Z","lastTransitionTime":"2026-01-22T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.290657 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.290711 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.290728 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.290758 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.290775 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:24Z","lastTransitionTime":"2026-01-22T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.393430 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.393589 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.393615 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.393651 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.393675 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:24Z","lastTransitionTime":"2026-01-22T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.476111 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.476264 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:24 crc kubenswrapper[4829]: E0122 00:08:24.476370 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:28.476336972 +0000 UTC m=+146.512578944 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:08:24 crc kubenswrapper[4829]: E0122 00:08:24.476454 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 00:08:24 crc kubenswrapper[4829]: E0122 00:08:24.476486 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 00:08:24 crc kubenswrapper[4829]: E0122 00:08:24.476505 4829 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:08:24 crc kubenswrapper[4829]: E0122 00:08:24.476616 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 00:09:28.47659277 +0000 UTC m=+146.512834732 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.476606 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:24 crc kubenswrapper[4829]: E0122 00:08:24.476830 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 00:08:24 crc kubenswrapper[4829]: E0122 00:08:24.476892 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 00:08:24 crc kubenswrapper[4829]: E0122 00:08:24.476921 4829 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:08:24 crc kubenswrapper[4829]: E0122 00:08:24.477025 4829 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 00:09:28.476994943 +0000 UTC m=+146.513236895 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.496698 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.496771 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.496789 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.496823 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.496842 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:24Z","lastTransitionTime":"2026-01-22T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.553231 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.553330 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:24 crc kubenswrapper[4829]: E0122 00:08:24.553457 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:24 crc kubenswrapper[4829]: E0122 00:08:24.553641 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.577885 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.577991 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:24 crc kubenswrapper[4829]: E0122 00:08:24.578167 4829 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 00:08:24 crc kubenswrapper[4829]: E0122 00:08:24.578205 4829 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 00:08:24 crc kubenswrapper[4829]: E0122 00:08:24.578251 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 00:09:28.578229629 +0000 UTC m=+146.614471581 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 00:08:24 crc kubenswrapper[4829]: E0122 00:08:24.578671 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 00:09:28.578641592 +0000 UTC m=+146.614883544 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.601041 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.601102 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.601116 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.601140 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.601154 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:24Z","lastTransitionTime":"2026-01-22T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.647253 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 00:25:46.26309261 +0000 UTC Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.704754 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.704819 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.704841 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.704952 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.704981 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:24Z","lastTransitionTime":"2026-01-22T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.808713 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.808759 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.808770 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.808793 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.808808 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:24Z","lastTransitionTime":"2026-01-22T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.912114 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.912194 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.912232 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.912265 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:24 crc kubenswrapper[4829]: I0122 00:08:24.912287 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:24Z","lastTransitionTime":"2026-01-22T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.018230 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.018308 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.018350 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.018394 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.018420 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:25Z","lastTransitionTime":"2026-01-22T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.121938 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.122040 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.122077 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.122148 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.122171 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:25Z","lastTransitionTime":"2026-01-22T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.224819 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.224862 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.224873 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.224889 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.224901 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:25Z","lastTransitionTime":"2026-01-22T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.328076 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.328129 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.328140 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.328158 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.328171 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:25Z","lastTransitionTime":"2026-01-22T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.431108 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.431181 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.431195 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.431218 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.431232 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:25Z","lastTransitionTime":"2026-01-22T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.534238 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.534279 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.534288 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.534302 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.534311 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:25Z","lastTransitionTime":"2026-01-22T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.552831 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.552952 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:25 crc kubenswrapper[4829]: E0122 00:08:25.552981 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:25 crc kubenswrapper[4829]: E0122 00:08:25.553184 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.636853 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.636922 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.636938 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.636963 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.636983 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:25Z","lastTransitionTime":"2026-01-22T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.648087 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 13:55:51.690897132 +0000 UTC Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.740160 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.740222 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.740247 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.740276 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.740300 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:25Z","lastTransitionTime":"2026-01-22T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.843501 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.843584 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.843596 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.843619 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.843632 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:25Z","lastTransitionTime":"2026-01-22T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.947335 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.947454 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.947481 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.947524 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:25 crc kubenswrapper[4829]: I0122 00:08:25.947587 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:25Z","lastTransitionTime":"2026-01-22T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.050766 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.050833 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.050850 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.050874 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.050891 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:26Z","lastTransitionTime":"2026-01-22T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.153962 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.154068 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.154083 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.154112 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.154129 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:26Z","lastTransitionTime":"2026-01-22T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.257220 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.257298 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.257311 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.257337 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.257350 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:26Z","lastTransitionTime":"2026-01-22T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.360694 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.360753 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.360776 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.360808 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.360830 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:26Z","lastTransitionTime":"2026-01-22T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.464012 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.464076 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.464097 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.464122 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.464143 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:26Z","lastTransitionTime":"2026-01-22T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.552789 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.552820 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:26 crc kubenswrapper[4829]: E0122 00:08:26.553034 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:26 crc kubenswrapper[4829]: E0122 00:08:26.553193 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.566419 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.566508 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.566530 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.566646 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.566668 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:26Z","lastTransitionTime":"2026-01-22T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.648639 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 00:09:44.183066334 +0000 UTC Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.669518 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.669630 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.669649 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.669675 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.669694 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:26Z","lastTransitionTime":"2026-01-22T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.772857 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.772961 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.772983 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.773011 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.773032 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:26Z","lastTransitionTime":"2026-01-22T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.876611 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.876686 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.876712 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.876742 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.876763 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:26Z","lastTransitionTime":"2026-01-22T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.980104 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.980152 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.980164 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.980184 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:26 crc kubenswrapper[4829]: I0122 00:08:26.980196 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:26Z","lastTransitionTime":"2026-01-22T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.083256 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.083318 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.083331 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.083349 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.083360 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:27Z","lastTransitionTime":"2026-01-22T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.187074 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.187144 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.187160 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.187185 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.187205 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:27Z","lastTransitionTime":"2026-01-22T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.290530 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.290614 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.290631 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.290655 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.290671 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:27Z","lastTransitionTime":"2026-01-22T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.394253 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.394320 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.394345 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.394377 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.394398 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:27Z","lastTransitionTime":"2026-01-22T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.497817 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.497890 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.497908 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.497937 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.497955 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:27Z","lastTransitionTime":"2026-01-22T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.552799 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.552842 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:27 crc kubenswrapper[4829]: E0122 00:08:27.553001 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:27 crc kubenswrapper[4829]: E0122 00:08:27.553155 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.601216 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.601265 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.601277 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.601298 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.601311 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:27Z","lastTransitionTime":"2026-01-22T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.649428 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 02:44:51.735805471 +0000 UTC Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.705878 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.706291 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.706309 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.706338 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.706356 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:27Z","lastTransitionTime":"2026-01-22T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.808812 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.808880 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.808897 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.808926 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.808944 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:27Z","lastTransitionTime":"2026-01-22T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.914174 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.914263 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.914284 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.914313 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:27 crc kubenswrapper[4829]: I0122 00:08:27.914340 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:27Z","lastTransitionTime":"2026-01-22T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.018620 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.018679 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.018692 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.018715 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.018729 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:28Z","lastTransitionTime":"2026-01-22T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.120813 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.120855 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.120866 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.120883 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.120894 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:28Z","lastTransitionTime":"2026-01-22T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.223922 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.223972 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.223985 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.224002 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.224015 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:28Z","lastTransitionTime":"2026-01-22T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.325983 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.326032 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.326043 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.326060 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.326155 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:28Z","lastTransitionTime":"2026-01-22T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.429715 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.429772 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.429786 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.429807 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.429819 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:28Z","lastTransitionTime":"2026-01-22T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.531764 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.531801 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.531810 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.531825 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.531834 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:28Z","lastTransitionTime":"2026-01-22T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.552623 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.552782 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:28 crc kubenswrapper[4829]: E0122 00:08:28.552957 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:28 crc kubenswrapper[4829]: E0122 00:08:28.553097 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.634365 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.634489 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.634511 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.634566 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.634592 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:28Z","lastTransitionTime":"2026-01-22T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.650580 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 03:47:04.003821182 +0000 UTC Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.738141 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.738194 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.738207 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.738229 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.738242 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:28Z","lastTransitionTime":"2026-01-22T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.841493 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.841921 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.842077 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.842280 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.842417 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:28Z","lastTransitionTime":"2026-01-22T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.945743 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.946087 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.946345 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.946655 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:28 crc kubenswrapper[4829]: I0122 00:08:28.946848 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:28Z","lastTransitionTime":"2026-01-22T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.050184 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.050241 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.050261 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.050286 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.050303 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:29Z","lastTransitionTime":"2026-01-22T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.153116 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.153168 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.153184 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.153212 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.153230 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:29Z","lastTransitionTime":"2026-01-22T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.256105 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.256168 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.256187 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.256211 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.256232 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:29Z","lastTransitionTime":"2026-01-22T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.358736 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.358808 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.358832 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.358867 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.358889 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:29Z","lastTransitionTime":"2026-01-22T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.462966 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.463035 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.463058 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.463087 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.463111 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:29Z","lastTransitionTime":"2026-01-22T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.553003 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.553059 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:29 crc kubenswrapper[4829]: E0122 00:08:29.553439 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:29 crc kubenswrapper[4829]: E0122 00:08:29.553758 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.566286 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.566334 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.566346 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.566360 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.566374 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:29Z","lastTransitionTime":"2026-01-22T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.651469 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 10:02:48.035295033 +0000 UTC Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.669425 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.669483 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.669500 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.669524 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.669579 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:29Z","lastTransitionTime":"2026-01-22T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.772906 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.772966 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.772990 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.773020 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.773047 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:29Z","lastTransitionTime":"2026-01-22T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.876154 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.876209 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.876227 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.876249 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.876265 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:29Z","lastTransitionTime":"2026-01-22T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.978696 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.978764 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.978776 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.978795 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:29 crc kubenswrapper[4829]: I0122 00:08:29.978809 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:29Z","lastTransitionTime":"2026-01-22T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.082397 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.082472 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.082495 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.082523 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.082581 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:30Z","lastTransitionTime":"2026-01-22T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.184646 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.184719 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.184742 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.184771 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.184796 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:30Z","lastTransitionTime":"2026-01-22T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.287130 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.287190 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.287214 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.287241 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.287261 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:30Z","lastTransitionTime":"2026-01-22T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.389531 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.389593 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.389609 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.389626 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.389637 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:30Z","lastTransitionTime":"2026-01-22T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.492528 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.492645 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.492662 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.492687 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.492704 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:30Z","lastTransitionTime":"2026-01-22T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.553076 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:30 crc kubenswrapper[4829]: E0122 00:08:30.553180 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.553433 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:30 crc kubenswrapper[4829]: E0122 00:08:30.553911 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.554037 4829 scope.go:117] "RemoveContainer" containerID="54948fa627950cb8df7f9984fd67630b5ccf4f16de9d20b0076fee4cf34ef686" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.594685 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.594939 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.594951 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.594968 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.594981 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:30Z","lastTransitionTime":"2026-01-22T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.652517 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 08:57:26.291265454 +0000 UTC Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.698386 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.698447 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.698465 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.698491 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.698514 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:30Z","lastTransitionTime":"2026-01-22T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.801235 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.801307 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.801320 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.801344 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.801359 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:30Z","lastTransitionTime":"2026-01-22T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.905772 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.905826 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.905839 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.905864 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:30 crc kubenswrapper[4829]: I0122 00:08:30.905879 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:30Z","lastTransitionTime":"2026-01-22T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.008420 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.008457 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.008465 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.008480 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.008491 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:31Z","lastTransitionTime":"2026-01-22T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.151071 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.151133 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.151148 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.151170 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.151181 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:31Z","lastTransitionTime":"2026-01-22T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.255119 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fd6j8_7df1ca93-0e8f-4f06-8b8f-2297a8dbb340/ovnkube-controller/2.log" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.257560 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" event={"ID":"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340","Type":"ContainerStarted","Data":"9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63"} Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.258467 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.276474 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.276555 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.276573 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.276597 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.276616 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:31Z","lastTransitionTime":"2026-01-22T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.289388 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.300413 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ea70a412-747d-42b1-bcee-db4479d6c229\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65417b8588477960ede9d8a15555b74822e2d5e3599f0725f7ef07f42d79215b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55764e49ae42fc92a2177278dd2db24901b22d3609b30b90650822c6972df4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8d59c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:31Z is after 2025-08-24T17:21:41Z" Jan 22 
00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.314275 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.331359 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.348764 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.372985 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-22T00:08:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.378234 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.378263 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.378276 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.378294 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.378305 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:31Z","lastTransitionTime":"2026-01-22T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.392758 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9db0e6ba514af8490d566485e8a6e3b0c5cf2e33
03d8653681414e05200dea63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54948fa627950cb8df7f9984fd67630b5ccf4f16de9d20b0076fee4cf34ef686\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:07:59Z\\\",\\\"message\\\":\\\"o:160\\\\nI0122 00:07:59.663399 6522 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.663515 6522 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.663908 6522 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 00:07:59.664140 6522 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.664566 6522 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 00:07:59.664590 6522 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 00:07:59.664626 6522 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 00:07:59.664645 6522 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 00:07:59.664693 6522 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 00:07:59.664707 6522 factory.go:656] Stopping watch factory\\\\nI0122 00:07:59.664721 6522 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 00:07:59.664736 6522 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 00:07:59.664737 6522 handler.go:208] Removed *v1.EgressIP 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:08:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.408339 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9d55693ac801f6179327e96684c7afbb592bb48d88d7491f442637557693fc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.420494 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.444819 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37
932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.458288 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.474604 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c82dd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74beaade-c8f6-4d34-842b-1c03fe72b195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c82dd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.480490 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.480860 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.481005 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.481157 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.481282 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:31Z","lastTransitionTime":"2026-01-22T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.490238 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.505822 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.520286 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.532568 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.550858 4829 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83e98a4d32040f6ba77c2c30a1833ff63b851a4aac473f7367192aa2fdc68a4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:08:13Z\\\",\\\"message\\\":\\\"2026-01-22T00:07:27+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3cf3d1b5-252c-460a-8e5c-1bd10d71c877\\\\n2026-01-22T00:07:27+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3cf3d1b5-252c-460a-8e5c-1bd10d71c877 to /host/opt/cni/bin/\\\\n2026-01-22T00:07:28Z [verbose] multus-daemon started\\\\n2026-01-22T00:07:28Z [verbose] Readiness Indicator file check\\\\n2026-01-22T00:08:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.552877 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.552912 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:31 crc kubenswrapper[4829]: E0122 00:08:31.553008 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:31 crc kubenswrapper[4829]: E0122 00:08:31.553080 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.564296 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"51b88d2f-3b05-42dd-85b5-eda696a7940f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42fb8905842fc9628adedb2e4d8f72324c1fa83cd086ff87ac599053d271742e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ebd2b2db90bf8261200aba8c0df5d07b321e2f4d6f428707c3d0716c0d684e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a949d7b4e46134e62601d299ac318fd21e3d1a46e9d66651e000cddbaf9732f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/op
enshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:31Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.583268 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.583312 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.583328 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.583348 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.583361 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:31Z","lastTransitionTime":"2026-01-22T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.653899 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 17:48:01.442651539 +0000 UTC Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.685806 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.685857 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.685873 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.685897 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.685912 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:31Z","lastTransitionTime":"2026-01-22T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.787928 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.787958 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.787969 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.787984 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.787994 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:31Z","lastTransitionTime":"2026-01-22T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.890197 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.890255 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.890264 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.890279 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.890289 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:31Z","lastTransitionTime":"2026-01-22T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.993029 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.993078 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.993095 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.993120 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:31 crc kubenswrapper[4829]: I0122 00:08:31.993136 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:31Z","lastTransitionTime":"2026-01-22T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.020826 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.020896 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.020916 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.020940 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.020958 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:32Z","lastTransitionTime":"2026-01-22T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:32 crc kubenswrapper[4829]: E0122 00:08:32.042990 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.048378 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.048450 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.048469 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.048495 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.048513 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:32Z","lastTransitionTime":"2026-01-22T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:32 crc kubenswrapper[4829]: E0122 00:08:32.070466 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.075277 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.075317 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.075328 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.075345 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.075359 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:32Z","lastTransitionTime":"2026-01-22T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:32 crc kubenswrapper[4829]: E0122 00:08:32.088888 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.093015 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.093050 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.093060 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.093077 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.093089 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:32Z","lastTransitionTime":"2026-01-22T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:32 crc kubenswrapper[4829]: E0122 00:08:32.112148 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.116229 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.116290 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.116307 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.116329 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.116346 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:32Z","lastTransitionTime":"2026-01-22T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:32 crc kubenswrapper[4829]: E0122 00:08:32.131718 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: E0122 00:08:32.131950 4829 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.133885 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.133922 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.133937 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.133957 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.133971 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:32Z","lastTransitionTime":"2026-01-22T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.236434 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.236496 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.236519 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.236578 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.236605 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:32Z","lastTransitionTime":"2026-01-22T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.264593 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fd6j8_7df1ca93-0e8f-4f06-8b8f-2297a8dbb340/ovnkube-controller/3.log" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.265648 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fd6j8_7df1ca93-0e8f-4f06-8b8f-2297a8dbb340/ovnkube-controller/2.log" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.268749 4829 generic.go:334] "Generic (PLEG): container finished" podID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerID="9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63" exitCode=1 Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.268811 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" event={"ID":"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340","Type":"ContainerDied","Data":"9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63"} Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.268864 4829 scope.go:117] "RemoveContainer" containerID="54948fa627950cb8df7f9984fd67630b5ccf4f16de9d20b0076fee4cf34ef686" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.269831 4829 scope.go:117] "RemoveContainer" containerID="9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63" Jan 22 00:08:32 crc kubenswrapper[4829]: E0122 00:08:32.270122 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-fd6j8_openshift-ovn-kubernetes(7df1ca93-0e8f-4f06-8b8f-2297a8dbb340)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.295844 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83e98a4d32040f6ba77c2c30a1833ff63b851a4aac473f7367192aa2fdc68a4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:08:13Z\\\",\\\"message\\\":\\\"2026-01-22T00:07:27+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3cf3d1b5-252c-460a-8e5c-1bd10d71c877\\\\n2026-01-22T00:07:27+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3cf3d1b5-252c-460a-8e5c-1bd10d71c877 to /host/opt/cni/bin/\\\\n2026-01-22T00:07:28Z [verbose] multus-daemon started\\\\n2026-01-22T00:07:28Z [verbose] Readiness Indicator file check\\\\n2026-01-22T00:08:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.312972 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"51b88d2f-3b05-42dd-85b5-eda696a7940f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42fb8905842fc9628adedb2e4d8f72324c1fa83cd086ff87ac599053d271742e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ebd2b2db90bf8261200aba8c0df5d07b321e2f4d6f428707c3d0716c0d684e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a949d7b4e46134e62601d299ac318fd21e3d1a46e9d66651e000cddbaf9732f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.335409 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.340299 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.340335 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.340346 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.340363 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.340375 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:32Z","lastTransitionTime":"2026-01-22T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.351627 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.370633 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.389386 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.409267 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.423878 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ea70a412-747d-42b1-bcee-db4479d6c229\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65417b8588477960ede9d8a15555b74822e2d5e3599f0725f7ef07f42d79215b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55764e49ae42fc92a2177278dd2db24901b22d3609b30b90650822c6972df4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:39Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8d59c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.444053 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.444083 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.444093 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.444107 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.444116 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:32Z","lastTransitionTime":"2026-01-22T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.446465 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9db0e6ba514af8490d566485e8a6e3b0c5cf2e33
03d8653681414e05200dea63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54948fa627950cb8df7f9984fd67630b5ccf4f16de9d20b0076fee4cf34ef686\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:07:59Z\\\",\\\"message\\\":\\\"o:160\\\\nI0122 00:07:59.663399 6522 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.663515 6522 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.663908 6522 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 00:07:59.664140 6522 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.664566 6522 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 00:07:59.664590 6522 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 00:07:59.664626 6522 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 00:07:59.664645 6522 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 00:07:59.664693 6522 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 00:07:59.664707 6522 factory.go:656] Stopping watch factory\\\\nI0122 00:07:59.664721 6522 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 00:07:59.664736 6522 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 00:07:59.664737 6522 handler.go:208] Removed *v1.EgressIP ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:08:31Z\\\",\\\"message\\\":\\\"uter]} options:{GoMap:map[requested-tnl-key:2 router-port:rtots-crc]} port_security:{GoSet:[]} tag_request:{GoSet:[]} type:router] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {8b3ef3d2-aa1a-4ff5-b390-b2bd8f0241f3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:8b3ef3d2-aa1a-4ff5-b390-b2bd8f0241f3}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {3cb9854d-2900-4fd0-baba-4bfcad667b19}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 00:08:31.790672 6931 obj_retry.go:551] Creating *v1.Node crc took: 35.338073ms\\\\nI0122 00:08:31.790702 6931 factory.go:1336] Added *v1.Node event handler 2\\\\nI0122 00:08:31.790722 6931 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 00:08:31.790743 6931 services_controller.go:189] Starting controller ovn-lb-controller for network=default\\\\nI0122 00:08:31.790765 6931 factory.go:656] Stopping watch factory\\\\nI0122 00:08:31.790783 
6931 ovnkube.go:599] Stopped ovnkube\\\\nI0122 00:08:31.790809 6931 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 00:08:31.790827 6931 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0122 00:08:31.790885 6931 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:08:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.1
68.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.465758 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9d55693ac801f6179327e96684c7afbb592bb48d88d7491f442637557693fc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.480569 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.507378 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37
932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.521198 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.533703 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.545719 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.546975 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.547050 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.547071 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.547101 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.547126 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:32Z","lastTransitionTime":"2026-01-22T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.553091 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.553222 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:32 crc kubenswrapper[4829]: E0122 00:08:32.553449 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:32 crc kubenswrapper[4829]: E0122 00:08:32.554503 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.564772 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.566128 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.580443 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with 
unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.593183 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c82dd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74beaade-c8f6-4d34-842b-1c03fe72b195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c82dd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.608402 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"064f3b9d-977a-4368-8634-b310fa6b3cff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e2ab466826f2b5f51036c71c1d030988a9d1055feaf49480276e73cb529d304\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7cb894e453b587455dd7347a2847dc834196552e6bc7327b559593ac4e3230d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7cb894e453b587455dd7347a2847dc834196552e6bc7327b559593ac4e3230d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.623773 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"51b88d2f-3b05-42dd-85b5-eda696a7940f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42fb8905842fc9628adedb2e4d8f72324c1fa83cd086ff87ac599053d271742e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ebd2b2db90bf8261200aba8c0df5d07b321e2f4d6f428707c3d0716c0d684e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a949d7b4e46134e62601d299ac318fd21e3d1a46e9d66651e000cddbaf9732f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.638798 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.648717 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.648752 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.648763 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.648779 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.648792 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:32Z","lastTransitionTime":"2026-01-22T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.651461 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.654029 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 19:07:16.387027552 +0000 UTC Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.663324 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83e98a4d32040f6ba77c2c30a1833ff63b851a4aac473f7367192aa2fdc68a4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:08:13Z\\\",\\\"message\\\":\\\"2026-01-22T00:07:27+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3cf3d1b5-252c-460a-8e5c-1bd10d71c877\\\\n2026-01-22T00:07:27+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3cf3d1b5-252c-460a-8e5c-1bd10d71c877 to /host/opt/cni/bin/\\\\n2026-01-22T00:07:28Z [verbose] multus-daemon started\\\\n2026-01-22T00:07:28Z [verbose] Readiness Indicator file check\\\\n2026-01-22T00:08:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.680657 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.693734 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.710285 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.723210 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ea70a412-747d-42b1-bcee-db4479d6c229\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65417b8588477960ede9d8a15555b74822e2d5e3599f0725f7ef07f42d79215b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55764e49ae42fc92a2177278dd2db24901b22d3609b30b90650822c6972df4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:39Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8d59c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.747270 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c687744
1ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.751136 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.751195 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.751219 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.751244 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.751262 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:32Z","lastTransitionTime":"2026-01-22T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.761580 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.775449 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.788322 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.807701 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9db0e6ba514af8490d566485e8a6e3b0c5cf2e33
03d8653681414e05200dea63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54948fa627950cb8df7f9984fd67630b5ccf4f16de9d20b0076fee4cf34ef686\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:07:59Z\\\",\\\"message\\\":\\\"o:160\\\\nI0122 00:07:59.663399 6522 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.663515 6522 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.663908 6522 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0122 00:07:59.664140 6522 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 00:07:59.664566 6522 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 00:07:59.664590 6522 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 00:07:59.664626 6522 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 00:07:59.664645 6522 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 00:07:59.664693 6522 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 00:07:59.664707 6522 factory.go:656] Stopping watch factory\\\\nI0122 00:07:59.664721 6522 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 00:07:59.664736 6522 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 00:07:59.664737 6522 handler.go:208] Removed *v1.EgressIP ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:08:31Z\\\",\\\"message\\\":\\\"uter]} options:{GoMap:map[requested-tnl-key:2 router-port:rtots-crc]} port_security:{GoSet:[]} tag_request:{GoSet:[]} type:router] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {8b3ef3d2-aa1a-4ff5-b390-b2bd8f0241f3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:8b3ef3d2-aa1a-4ff5-b390-b2bd8f0241f3}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {3cb9854d-2900-4fd0-baba-4bfcad667b19}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 00:08:31.790672 6931 obj_retry.go:551] Creating *v1.Node crc took: 35.338073ms\\\\nI0122 00:08:31.790702 6931 factory.go:1336] Added *v1.Node event handler 2\\\\nI0122 00:08:31.790722 6931 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 00:08:31.790743 6931 services_controller.go:189] Starting controller ovn-lb-controller for network=default\\\\nI0122 00:08:31.790765 6931 factory.go:656] Stopping watch factory\\\\nI0122 00:08:31.790783 
6931 ovnkube.go:599] Stopped ovnkube\\\\nI0122 00:08:31.790809 6931 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 00:08:31.790827 6931 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0122 00:08:31.790885 6931 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:08:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.1
68.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.821360 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9d55693ac801f6179327e96684c7afbb592bb48d88d7491f442637557693fc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.831670 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.844308 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.853932 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.853967 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.853975 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.853989 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.853997 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:32Z","lastTransitionTime":"2026-01-22T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.857114 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.868120 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c82dd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74beaade-c8f6-4d34-842b-1c03fe72b195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c82dd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:32Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.957234 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.957282 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.957294 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.957315 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:32 crc kubenswrapper[4829]: I0122 00:08:32.957333 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:32Z","lastTransitionTime":"2026-01-22T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.058889 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.058918 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.058926 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.058945 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.058955 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:33Z","lastTransitionTime":"2026-01-22T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.162450 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.162499 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.162513 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.162531 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.162567 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:33Z","lastTransitionTime":"2026-01-22T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.266002 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.266065 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.266084 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.266109 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.266127 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:33Z","lastTransitionTime":"2026-01-22T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.273283 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fd6j8_7df1ca93-0e8f-4f06-8b8f-2297a8dbb340/ovnkube-controller/3.log" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.279687 4829 scope.go:117] "RemoveContainer" containerID="9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63" Jan 22 00:08:33 crc kubenswrapper[4829]: E0122 00:08:33.279990 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-fd6j8_openshift-ovn-kubernetes(7df1ca93-0e8f-4f06-8b8f-2297a8dbb340)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.298705 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.313531 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c82dd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74beaade-c8f6-4d34-842b-1c03fe72b195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c82dd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.328460 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.340666 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"51b88d2f-3b05-42dd-85b5-eda696a7940f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42fb8905842fc9628adedb2e4d8f72324c1fa83cd086ff87ac599053d271742e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ebd2b2db90bf8261200aba8c0df5d07b321e2f4d6f428707c3d0716c0d684e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a949d7b4e46134e62601d299ac318fd21e3d1a46e9d66651e000cddbaf9732f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.351327 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.361048 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\
"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.368214 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.368253 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.368263 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.368279 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.368291 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:33Z","lastTransitionTime":"2026-01-22T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.376060 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83e98a4d32040f6ba77c2c30a1833ff63b851a4aac473f7367192aa2fdc68a4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:08:13Z\\\",\\\"message\\\":\\\"2026-01-22T00:07:27+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3cf3d1b5-252c-460a-8e5c-1bd10d71c877\\\\n2026-01-22T00:07:27+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3cf3d1b5-252c-460a-8e5c-1bd10d71c877 to /host/opt/cni/bin/\\\\n2026-01-22T00:07:28Z [verbose] multus-daemon started\\\\n2026-01-22T00:07:28Z [verbose] Readiness Indicator file check\\\\n2026-01-22T00:08:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.387290 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"064f3b9d-977a-4368-8634-b310fa6b3cff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e2ab466826f2b5f51036c71c1d030988a9d1055feaf49480276e73cb529d304\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7cb894e453b587455dd7347a2847dc834196552e6bc7327b559593ac4e3230d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7cb894e453b587455dd7347a2847dc834196552e6bc7327b559593ac4e3230d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.399122 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.411970 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.421161 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ea70a412-747d-42b1-bcee-db4479d6c229\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65417b8588477960ede9d8a15555b74822e2d5e3599f0725f7ef07f42d79215b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55764e49ae42fc92a2177278dd2db24901b22d3609b30b90650822c6972df4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:39Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8d59c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.435275 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastStat
e\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.447147 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.458331 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.466772 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-22T00:08:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.470820 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.471163 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.471330 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.471529 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.471754 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:33Z","lastTransitionTime":"2026-01-22T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.484331 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9db0e6ba514af8490d566485e8a6e3b0c5cf2e33
03d8653681414e05200dea63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:08:31Z\\\",\\\"message\\\":\\\"uter]} options:{GoMap:map[requested-tnl-key:2 router-port:rtots-crc]} port_security:{GoSet:[]} tag_request:{GoSet:[]} type:router] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {8b3ef3d2-aa1a-4ff5-b390-b2bd8f0241f3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:8b3ef3d2-aa1a-4ff5-b390-b2bd8f0241f3}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {3cb9854d-2900-4fd0-baba-4bfcad667b19}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 00:08:31.790672 6931 obj_retry.go:551] Creating *v1.Node crc took: 35.338073ms\\\\nI0122 00:08:31.790702 6931 factory.go:1336] Added *v1.Node event handler 2\\\\nI0122 00:08:31.790722 6931 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 00:08:31.790743 6931 services_controller.go:189] Starting controller ovn-lb-controller for network=default\\\\nI0122 00:08:31.790765 6931 factory.go:656] Stopping watch factory\\\\nI0122 00:08:31.790783 6931 ovnkube.go:599] Stopped ovnkube\\\\nI0122 00:08:31.790809 6931 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 00:08:31.790827 6931 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0122 00:08:31.790885 6931 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:08:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fd6j8_openshift-ovn-kubernetes(7df1ca93-0e8f-4f06-8b8f-2297a8dbb340)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.500801 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9d55693ac801f6179327e96684c7afbb592bb48d88d7491f442637557693fc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.512219 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\
\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.550743 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa372
3269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca
\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:33Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.552847 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:33 crc kubenswrapper[4829]: E0122 00:08:33.552953 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.553117 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:33 crc kubenswrapper[4829]: E0122 00:08:33.553168 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.574426 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.574861 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.575007 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.575152 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.575425 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:33Z","lastTransitionTime":"2026-01-22T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.654398 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 06:02:15.141352494 +0000 UTC Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.678416 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.678478 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.678496 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.678521 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.678577 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:33Z","lastTransitionTime":"2026-01-22T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.781584 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.781628 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.781648 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.781674 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.781691 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:33Z","lastTransitionTime":"2026-01-22T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.884256 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.884305 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.884317 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.884341 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.884355 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:33Z","lastTransitionTime":"2026-01-22T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.988416 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.988860 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.989022 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.989180 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:33 crc kubenswrapper[4829]: I0122 00:08:33.989345 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:33Z","lastTransitionTime":"2026-01-22T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.092956 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.093019 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.093043 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.093074 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.093102 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:34Z","lastTransitionTime":"2026-01-22T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.196111 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.196186 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.196221 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.196250 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.196271 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:34Z","lastTransitionTime":"2026-01-22T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.298969 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.299030 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.299049 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.299074 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.299089 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:34Z","lastTransitionTime":"2026-01-22T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.402083 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.402174 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.402195 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.402225 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.402243 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:34Z","lastTransitionTime":"2026-01-22T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.505444 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.505474 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.505511 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.505528 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.505557 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:34Z","lastTransitionTime":"2026-01-22T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.553265 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.553269 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:34 crc kubenswrapper[4829]: E0122 00:08:34.553468 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:34 crc kubenswrapper[4829]: E0122 00:08:34.553567 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.609508 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.609580 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.609592 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.609607 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.609618 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:34Z","lastTransitionTime":"2026-01-22T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.655637 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 09:22:52.374135551 +0000 UTC Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.712725 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.712781 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.712799 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.712825 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.712842 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:34Z","lastTransitionTime":"2026-01-22T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.816099 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.816130 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.816141 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.816157 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.816169 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:34Z","lastTransitionTime":"2026-01-22T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.919451 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.919574 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.919607 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.919639 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:34 crc kubenswrapper[4829]: I0122 00:08:34.919658 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:34Z","lastTransitionTime":"2026-01-22T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.022716 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.022766 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.022777 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.022794 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.022805 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:35Z","lastTransitionTime":"2026-01-22T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.125833 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.125908 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.125926 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.125953 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.125969 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:35Z","lastTransitionTime":"2026-01-22T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.227925 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.227966 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.227978 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.227994 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.228006 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:35Z","lastTransitionTime":"2026-01-22T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.330387 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.330428 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.330466 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.330489 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.330501 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:35Z","lastTransitionTime":"2026-01-22T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.432997 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.433045 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.433056 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.433072 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.433083 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:35Z","lastTransitionTime":"2026-01-22T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.536332 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.536379 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.536390 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.536406 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.536417 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:35Z","lastTransitionTime":"2026-01-22T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.552684 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:35 crc kubenswrapper[4829]: E0122 00:08:35.552805 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.552903 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:35 crc kubenswrapper[4829]: E0122 00:08:35.553175 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.639189 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.639229 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.639240 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.639256 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.639268 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:35Z","lastTransitionTime":"2026-01-22T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.656752 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 18:14:37.977460352 +0000 UTC Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.741892 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.741968 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.741993 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.742025 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.742047 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:35Z","lastTransitionTime":"2026-01-22T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.845168 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.845207 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.845218 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.845237 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.845259 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:35Z","lastTransitionTime":"2026-01-22T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.948591 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.948687 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.948708 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.948734 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:35 crc kubenswrapper[4829]: I0122 00:08:35.948792 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:35Z","lastTransitionTime":"2026-01-22T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.052571 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.052626 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.052643 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.052668 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.052685 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:36Z","lastTransitionTime":"2026-01-22T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.155603 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.155650 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.155663 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.155687 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.155710 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:36Z","lastTransitionTime":"2026-01-22T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.259060 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.259148 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.259171 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.259202 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.259225 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:36Z","lastTransitionTime":"2026-01-22T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.362812 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.362895 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.362931 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.362967 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.362992 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:36Z","lastTransitionTime":"2026-01-22T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.465751 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.465801 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.465818 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.465842 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.465859 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:36Z","lastTransitionTime":"2026-01-22T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.553314 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.553392 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:36 crc kubenswrapper[4829]: E0122 00:08:36.553460 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:36 crc kubenswrapper[4829]: E0122 00:08:36.553592 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.567997 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.568042 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.568057 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.568076 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.568090 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:36Z","lastTransitionTime":"2026-01-22T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.657750 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 11:50:27.219789253 +0000 UTC Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.671113 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.671142 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.671150 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.671165 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.671173 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:36Z","lastTransitionTime":"2026-01-22T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.774642 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.774763 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.774781 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.774806 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.774823 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:36Z","lastTransitionTime":"2026-01-22T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.877766 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.877828 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.877850 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.877880 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.877902 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:36Z","lastTransitionTime":"2026-01-22T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.981059 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.981119 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.981135 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.981161 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:36 crc kubenswrapper[4829]: I0122 00:08:36.981179 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:36Z","lastTransitionTime":"2026-01-22T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.084314 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.084378 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.084400 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.084425 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.084444 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:37Z","lastTransitionTime":"2026-01-22T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.188655 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.188752 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.188802 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.188829 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.188913 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:37Z","lastTransitionTime":"2026-01-22T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.290720 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.290755 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.290777 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.290792 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.290801 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:37Z","lastTransitionTime":"2026-01-22T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.393031 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.393065 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.393074 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.393087 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.393095 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:37Z","lastTransitionTime":"2026-01-22T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.499782 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.499855 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.499874 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.499902 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.499920 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:37Z","lastTransitionTime":"2026-01-22T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.553355 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.553377 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:37 crc kubenswrapper[4829]: E0122 00:08:37.553608 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:37 crc kubenswrapper[4829]: E0122 00:08:37.553787 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.603094 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.603151 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.603173 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.603201 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.603223 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:37Z","lastTransitionTime":"2026-01-22T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.658598 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 05:57:06.896850297 +0000 UTC Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.705898 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.705946 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.705963 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.705987 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.706004 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:37Z","lastTransitionTime":"2026-01-22T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.809650 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.809721 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.809744 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.809773 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.809790 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:37Z","lastTransitionTime":"2026-01-22T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.913131 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.913217 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.913246 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.913273 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:37 crc kubenswrapper[4829]: I0122 00:08:37.913291 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:37Z","lastTransitionTime":"2026-01-22T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.016970 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.017014 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.017023 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.017040 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.017050 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:38Z","lastTransitionTime":"2026-01-22T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.120129 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.120189 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.120206 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.120234 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.120251 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:38Z","lastTransitionTime":"2026-01-22T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.223202 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.223240 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.223253 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.223272 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.223285 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:38Z","lastTransitionTime":"2026-01-22T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.325900 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.325961 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.325982 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.326008 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.326026 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:38Z","lastTransitionTime":"2026-01-22T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.428166 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.428205 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.428217 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.428235 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.428246 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:38Z","lastTransitionTime":"2026-01-22T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.531595 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.531651 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.531668 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.531696 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.531713 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:38Z","lastTransitionTime":"2026-01-22T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.553275 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:38 crc kubenswrapper[4829]: E0122 00:08:38.553998 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.553294 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:38 crc kubenswrapper[4829]: E0122 00:08:38.554744 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.635145 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.635195 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.635211 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.635235 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.635253 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:38Z","lastTransitionTime":"2026-01-22T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.659604 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 20:24:36.980630302 +0000 UTC Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.737944 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.737984 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.737999 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.738022 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.738038 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:38Z","lastTransitionTime":"2026-01-22T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.840413 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.840456 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.840471 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.840488 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.840500 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:38Z","lastTransitionTime":"2026-01-22T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.944114 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.944290 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.944323 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.944354 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:38 crc kubenswrapper[4829]: I0122 00:08:38.944378 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:38Z","lastTransitionTime":"2026-01-22T00:08:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.048171 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.048222 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.048237 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.048261 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.048279 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:39Z","lastTransitionTime":"2026-01-22T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.151389 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.151437 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.151448 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.151466 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.151476 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:39Z","lastTransitionTime":"2026-01-22T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.253896 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.253955 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.253968 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.253987 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.254001 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:39Z","lastTransitionTime":"2026-01-22T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.356404 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.356456 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.356468 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.356485 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.356496 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:39Z","lastTransitionTime":"2026-01-22T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.459417 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.459493 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.459511 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.459536 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.459579 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:39Z","lastTransitionTime":"2026-01-22T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.553355 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.553400 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:39 crc kubenswrapper[4829]: E0122 00:08:39.553526 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:39 crc kubenswrapper[4829]: E0122 00:08:39.553658 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.562139 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.562198 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.562219 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.562250 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.562273 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:39Z","lastTransitionTime":"2026-01-22T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.660396 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 09:28:49.928349556 +0000 UTC Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.664772 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.664815 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.664831 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.664855 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.664873 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:39Z","lastTransitionTime":"2026-01-22T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.768171 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.768221 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.768234 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.768253 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.768268 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:39Z","lastTransitionTime":"2026-01-22T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.870960 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.871000 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.871011 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.871026 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.871036 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:39Z","lastTransitionTime":"2026-01-22T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.973961 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.974000 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.974011 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.974027 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:39 crc kubenswrapper[4829]: I0122 00:08:39.974038 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:39Z","lastTransitionTime":"2026-01-22T00:08:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.077745 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.077811 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.077831 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.077862 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.077886 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:40Z","lastTransitionTime":"2026-01-22T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.181173 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.181223 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.181235 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.181254 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.181265 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:40Z","lastTransitionTime":"2026-01-22T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.283955 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.284030 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.284068 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.284159 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.284189 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:40Z","lastTransitionTime":"2026-01-22T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.386085 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.386122 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.386132 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.386147 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.386158 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:40Z","lastTransitionTime":"2026-01-22T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.488888 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.488929 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.488940 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.488958 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.488970 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:40Z","lastTransitionTime":"2026-01-22T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.552741 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.552770 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:40 crc kubenswrapper[4829]: E0122 00:08:40.552896 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:40 crc kubenswrapper[4829]: E0122 00:08:40.553155 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.592097 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.592160 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.592178 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.592199 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.592214 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:40Z","lastTransitionTime":"2026-01-22T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.660894 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 21:54:48.169972149 +0000 UTC Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.695189 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.695271 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.695308 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.695409 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.695436 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:40Z","lastTransitionTime":"2026-01-22T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.798995 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.799056 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.799072 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.799098 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.799116 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:40Z","lastTransitionTime":"2026-01-22T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.901566 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.901605 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.901619 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.901639 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:40 crc kubenswrapper[4829]: I0122 00:08:40.901653 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:40Z","lastTransitionTime":"2026-01-22T00:08:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.005348 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.005414 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.005438 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.005467 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.005489 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:41Z","lastTransitionTime":"2026-01-22T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.108926 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.108960 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.108968 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.108981 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.108989 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:41Z","lastTransitionTime":"2026-01-22T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.211446 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.211497 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.211532 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.211602 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.211628 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:41Z","lastTransitionTime":"2026-01-22T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.314150 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.314221 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.314246 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.314277 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.314300 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:41Z","lastTransitionTime":"2026-01-22T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.417754 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.417837 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.417862 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.417892 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.417959 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:41Z","lastTransitionTime":"2026-01-22T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.520524 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.520616 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.520633 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.520664 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.520688 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:41Z","lastTransitionTime":"2026-01-22T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.552877 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.552968 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:41 crc kubenswrapper[4829]: E0122 00:08:41.553006 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:41 crc kubenswrapper[4829]: E0122 00:08:41.553143 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.623296 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.623336 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.623349 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.623365 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.623378 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:41Z","lastTransitionTime":"2026-01-22T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.661937 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 12:13:51.382973705 +0000 UTC Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.726318 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.726386 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.726409 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.726437 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.726457 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:41Z","lastTransitionTime":"2026-01-22T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.829359 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.829419 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.829437 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.829460 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.829523 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:41Z","lastTransitionTime":"2026-01-22T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.932517 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.932621 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.932645 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.932669 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:41 crc kubenswrapper[4829]: I0122 00:08:41.932686 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:41Z","lastTransitionTime":"2026-01-22T00:08:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.036138 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.036189 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.036206 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.036231 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.036248 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:42Z","lastTransitionTime":"2026-01-22T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.139029 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.139080 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.139096 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.139122 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.139138 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:42Z","lastTransitionTime":"2026-01-22T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.241821 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.241887 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.241912 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.241943 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.241965 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:42Z","lastTransitionTime":"2026-01-22T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.249135 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.249199 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.249216 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.249235 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.249249 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:42Z","lastTransitionTime":"2026-01-22T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:42 crc kubenswrapper[4829]: E0122 00:08:42.269621 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:42Z is after 
2025-08-24T17:21:41Z" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.274991 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.275022 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.275030 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.275047 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.275057 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:42Z","lastTransitionTime":"2026-01-22T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:42 crc kubenswrapper[4829]: E0122 00:08:42.293605 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:42Z is after 
2025-08-24T17:21:41Z" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.298454 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.298514 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.298525 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.298556 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.298569 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:42Z","lastTransitionTime":"2026-01-22T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:42 crc kubenswrapper[4829]: E0122 00:08:42.315815 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:42Z is after 
2025-08-24T17:21:41Z" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.320885 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.321026 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.321095 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.321125 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.321174 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:42Z","lastTransitionTime":"2026-01-22T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:42 crc kubenswrapper[4829]: E0122 00:08:42.343138 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:42Z is after 
2025-08-24T17:21:41Z" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.348842 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.348883 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.348892 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.348908 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.348918 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:42Z","lastTransitionTime":"2026-01-22T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:42 crc kubenswrapper[4829]: E0122 00:08:42.363508 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f4fcd0ed-d5ee-4338-b695-290cd8c07003\\\",\\\"systemUUID\\\":\\\"2617ce83-db58-4b50-b1b2-592bf3969365\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:42Z is after 
2025-08-24T17:21:41Z" Jan 22 00:08:42 crc kubenswrapper[4829]: E0122 00:08:42.363977 4829 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.366063 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.366114 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.366123 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.366140 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.366151 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:42Z","lastTransitionTime":"2026-01-22T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.469521 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.469613 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.469631 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.469656 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.469674 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:42Z","lastTransitionTime":"2026-01-22T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.553093 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.553191 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:42 crc kubenswrapper[4829]: E0122 00:08:42.553280 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:42 crc kubenswrapper[4829]: E0122 00:08:42.553461 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.569744 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"80eb4c06-f6fe-4036-ba67-20a352c4c72a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c
7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0122 00:07:15.072195 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0122 00:07:15.073373 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4085308025/tls.crt::/tmp/serving-cert-4085308025/tls.key\\\\\\\"\\\\nI0122 00:07:20.792355 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0122 00:07:20.795090 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0122 00:07:20.795182 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0122 00:07:20.795250 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0122 00:07:20.795286 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0122 00:07:20.801529 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0122 00:07:20.801576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801583 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0122 00:07:20.801595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0122 00:07:20.801599 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0122 00:07:20.801604 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0122 00:07:20.801607 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0122 00:07:20.801682 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0122 00:07:20.804415 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.572355 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.572418 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.572468 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.572501 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.572522 4829 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:42Z","lastTransitionTime":"2026-01-22T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.592817 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1c7cde00-93d7-4b09-a6ee-a1526907c476\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e858ceb3c67aff1fcf98f95ed16251b559a39db167e352ed377df3d4ac09ee6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a1bbb7969a85c7664883b05a16ba0f010d7fac8043267eed88d6b94825fb03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a3d339ebc0665bcc495d641c4493d5044e2b88fac0ce4d1df7a7aa8afedd822\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.615226 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19a8d8b34d69924267118e6c4d6ee7b8d3997f319192811d60c53d6e47a18fa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.633274 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ea70a412-747d-42b1-bcee-db4479d6c229\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65417b8588477960ede9d8a15555b74822e2d5e3599f0725f7ef07f42d79215b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55764e49ae42fc92a2177278dd2db24901b22d3609b30b90650822c6972df4d1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2gh69\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:39Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-8d59c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:42Z is after 2025-08-24T17:21:41Z" Jan 22 
00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.662522 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 03:27:36.998825383 +0000 UTC Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.668184 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56876940-6a7e-4cd4-bf02-33c360972412\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3b157d7d350321bdba175e2fa67520d78e531f897c5b56a5a73618994dbb5d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34fd8aae1143c5914aa62d9195ed277f8be4e7066f470765903bc3677cc78225\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7d2ce0e4e382060388ac3c8e261d7cfd5515d871b9c5127f7262fd4da2995ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\
\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f93da7845dd8b67c564a0b6b3f38d1621940a37932283e071b68e2f46204f129\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d973081500e66251b030465bc44599a6e5669513f508b8a894c0a3633f5fe15\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://142d67e003f305da0b14532209b3605a5f265282ae3772e8d50a5308d94c84b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restart
Count\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e46215e544ab5a8bc44501cf79aacd1f10e1708340257673057fd80b89bfe4eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01804697fc4a883b5a2659b65497161e55a28b95a5fce7888a89570533553d11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.675002 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.675058 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.675081 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.675111 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.675132 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:42Z","lastTransitionTime":"2026-01-22T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.688799 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.708907 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.724514 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vgv2h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8d27a46b-130f-4497-af81-45ea63a50632\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://22f017650c94852a3a6170dff4bf9bcea7f8dfd29732f2bcb38ec45b92b98edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-q5bvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vgv2h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.747499 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9db0e6ba514af8490d566485e8a6e3b0c5cf2e33
03d8653681414e05200dea63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:08:31Z\\\",\\\"message\\\":\\\"uter]} options:{GoMap:map[requested-tnl-key:2 router-port:rtots-crc]} port_security:{GoSet:[]} tag_request:{GoSet:[]} type:router] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {8b3ef3d2-aa1a-4ff5-b390-b2bd8f0241f3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:8b3ef3d2-aa1a-4ff5-b390-b2bd8f0241f3}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {3cb9854d-2900-4fd0-baba-4bfcad667b19}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 00:08:31.790672 6931 obj_retry.go:551] Creating *v1.Node crc took: 35.338073ms\\\\nI0122 00:08:31.790702 6931 factory.go:1336] Added *v1.Node event handler 2\\\\nI0122 00:08:31.790722 6931 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 00:08:31.790743 6931 services_controller.go:189] Starting controller ovn-lb-controller for network=default\\\\nI0122 00:08:31.790765 6931 factory.go:656] Stopping watch factory\\\\nI0122 00:08:31.790783 6931 ovnkube.go:599] Stopped ovnkube\\\\nI0122 00:08:31.790809 6931 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 00:08:31.790827 6931 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0122 00:08:31.790885 6931 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:08:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fd6j8_openshift-ovn-kubernetes(7df1ca93-0e8f-4f06-8b8f-2297a8dbb340)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z7zv6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fd6j8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.766115 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v62gj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"257dfafb-8d80-4de2-97e5-96df6b004a43\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9d55693ac801f6179327e96684c7afbb592bb48d88d7491f442637557693fc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a832dd6d7951b1f5d3b9dd5aeea0b7100be52e097c4afaa2d3d69a60395370ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87af3154a9cda086945ce89f085180fed141f868fbffa9acd88bb88e13bc11a1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6de4b52c7e4a8bc454ef9a263445a4bfb2666a186fd68269c7ab9b88e293efbb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-01-22T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://569a78052d595d02373c1719e6c6ad6587f08b91e4cc44058da21e243f18b920\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://437d135c0733c6dc3fde52a4738261d536e0579a846da1f3697dcde51fa203ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fec95c6e148430d5022889e4c5adb401256ab8bc5e388a93217b37392ed80d28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nmcbx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v62gj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.777698 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.777745 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.777760 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.777781 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.777796 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:42Z","lastTransitionTime":"2026-01-22T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.777842 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-knbr4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"745ab832-48a6-4ce1-988c-30153d4ef1d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://849a2bcc50034726ca69fd6bd336c67060f71ca7887a47e057426b0bce74a4f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-smj89\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:27Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-knbr4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.794358 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b59d33652179bf09cab0561792d1517c50066d9b7d23b05b9183518afd9ed83a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.806518 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:20Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.822411 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-c82dd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74beaade-c8f6-4d34-842b-1c03fe72b195\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mpq9v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:40Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-c82dd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.838093 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"064f3b9d-977a-4368-8634-b310fa6b3cff\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e2ab466826f2b5f51036c71c1d030988a9d1055feaf49480276e73cb529d304\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7cb894e453b587455dd7347a2847dc834196552e6bc7327b559593ac4e3230d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7cb894e453b587455dd7347a2847dc834196552e6bc7327b559593ac4e3230d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.856943 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"51b88d2f-3b05-42dd-85b5-eda696a7940f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42fb8905842fc9628adedb2e4d8f72324c1fa83cd086ff87ac599053d271742e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ebd2b2db90bf8261200aba8c0df5d07b321e2f4d6f428707c3d0716c0d684e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a949d7b4e46134e62601d299ac318fd21e3d1a46e9d66651e000cddbaf9732f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6093474e13ddab8aa5fd59064a344b8a0c61c2d1680097a789cae5efe6e760b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T00:07:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.874590 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fed75cc420b2a19be897d157ae091d570ef3306cf35bc7784b229c525bb140e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://48df9cf3fe66cdbe4839443c37f3bf054cd9b71181aae3ea78fcba4fad9749ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.880219 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.880280 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.880303 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.880338 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.880360 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:42Z","lastTransitionTime":"2026-01-22T00:08:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.890240 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a30659bdc7473e536082587fecb004547edf4c613425ede4064b28a4cc24f27b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xmf49\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:25Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-x4jcr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.911195 4829 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4ss4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60f879f6-8b21-4e75-9a62-d372fec048e1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T00:08:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83e98a4d32040f6ba77c2c30a1833ff63b851a4aac473f7367192aa2fdc68a4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T00:08:13Z\\\",\\\"message\\\":\\\"2026-01-22T00:07:27+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_3cf3d1b5-252c-460a-8e5c-1bd10d71c877\\\\n2026-01-22T00:07:27+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_3cf3d1b5-252c-460a-8e5c-1bd10d71c877 to /host/opt/cni/bin/\\\\n2026-01-22T00:07:28Z [verbose] multus-daemon started\\\\n2026-01-22T00:07:28Z [verbose] Readiness Indicator file check\\\\n2026-01-22T00:08:13Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T00:07:26Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T00:08:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6kq6h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T00:07:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4ss4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T00:08:42Z is after 2025-08-24T17:21:41Z" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.983632 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.983723 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.983744 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.983773 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:42 crc kubenswrapper[4829]: I0122 00:08:42.983797 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:42Z","lastTransitionTime":"2026-01-22T00:08:42Z","reason":"KubeletNotReady","message":"container 
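The kube-multus termination message above shows the daemon polling for a readiness indicator file (/host/run/multus/cni/net.d/10-ovn-kubernetes.conf) from 00:07:28 until 00:08:13 and then giving up, roughly a 45-second window. A minimal sketch of that kind of file-existence poll, written against the plain standard library rather than multus's actual wait helpers, with illustrative interval and timeout values:

    package main

    import (
        "fmt"
        "os"
        "time"
    )

    // waitForFile polls until path exists or the timeout elapses.
    // interval and timeout are illustrative, not multus's defaults.
    func waitForFile(path string, interval, timeout time.Duration) error {
        deadline := time.Now().Add(timeout)
        for {
            if _, err := os.Stat(path); err == nil {
                return nil // readiness indicator file has appeared
            }
            if time.Now().After(deadline) {
                return fmt.Errorf("timed out waiting for %s", path)
            }
            time.Sleep(interval)
        }
    }

    func main() {
        err := waitForFile("/host/run/multus/cni/net.d/10-ovn-kubernetes.conf",
            time.Second, 45*time.Second)
        fmt.Println(err)
    }

In this log the file never appears because ovnkube-node, which is expected to write it, is itself failing (see the CrashLoopBackOff entry for ovnkube-controller later in the log), so the poll times out and the container restarts.
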
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.087078 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.087156 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.087172 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.087200 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.087220 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:43Z","lastTransitionTime":"2026-01-22T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.189652 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.189744 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.189765 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.189788 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.189805 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:43Z","lastTransitionTime":"2026-01-22T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
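Each setters.go line above carries the full Ready condition that the kubelet is writing into the Node status. Its shape can be read straight out of the log; the struct below is a small illustrative type that mirrors those fields with encoding/json, not the upstream API type itself:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // NodeCondition mirrors the fields shown in the setters.go log lines.
    type NodeCondition struct {
        Type               string `json:"type"`
        Status             string `json:"status"`
        LastHeartbeatTime  string `json:"lastHeartbeatTime"`
        LastTransitionTime string `json:"lastTransitionTime"`
        Reason             string `json:"reason"`
        Message            string `json:"message"`
    }

    func main() {
        raw := `{"type":"Ready","status":"False",` +
            `"lastHeartbeatTime":"2026-01-22T00:08:43Z",` +
            `"lastTransitionTime":"2026-01-22T00:08:43Z",` +
            `"reason":"KubeletNotReady",` +
            `"message":"container runtime network not ready"}`
        var c NodeCondition
        if err := json.Unmarshal([]byte(raw), &c); err != nil {
            panic(err)
        }
        fmt.Printf("%s=%s reason=%s\n", c.Type, c.Status, c.Reason) // Ready=False reason=KubeletNotReady
    }
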
Has your network provider started?"} Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.292348 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.292390 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.292400 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.292415 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.292427 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:43Z","lastTransitionTime":"2026-01-22T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.395410 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.395453 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.395464 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.395481 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.395491 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:43Z","lastTransitionTime":"2026-01-22T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.497789 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.497858 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.497896 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.497928 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.497962 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:43Z","lastTransitionTime":"2026-01-22T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.552948 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.553156 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:43 crc kubenswrapper[4829]: E0122 00:08:43.553381 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:43 crc kubenswrapper[4829]: E0122 00:08:43.554623 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.618623 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.618725 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.618744 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.618771 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.618790 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:43Z","lastTransitionTime":"2026-01-22T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
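The repeated "no CNI configuration file in /etc/kubernetes/cni/net.d/" errors mean the network plugin (OVN-Kubernetes here) has not yet dropped a configuration into the directory the kubelet watches, so new pod sandboxes cannot be created. A rough illustration of that readiness test, simply looking for *.conf, *.conflist, or *.json entries rather than reproducing the CNI library's actual config loader:

    package main

    import (
        "fmt"
        "os"
        "strings"
    )

    // hasCNIConfig reports whether dir contains at least one CNI config file.
    func hasCNIConfig(dir string) bool {
        entries, err := os.ReadDir(dir)
        if err != nil {
            return false // directory missing or unreadable counts as not ready
        }
        for _, e := range entries {
            name := e.Name()
            if strings.HasSuffix(name, ".conf") ||
                strings.HasSuffix(name, ".conflist") ||
                strings.HasSuffix(name, ".json") {
                return true
            }
        }
        return false
    }

    func main() {
        fmt.Println(hasCNIConfig("/etc/kubernetes/cni/net.d"))
    }
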
Has your network provider started?"} Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.663127 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 00:17:30.003035976 +0000 UTC Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.722434 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.722507 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.722530 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.722596 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.722621 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:43Z","lastTransitionTime":"2026-01-22T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.825052 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.825094 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.825106 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.825123 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.825133 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:43Z","lastTransitionTime":"2026-01-22T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
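The certificate_manager lines report the kubelet-serving certificate's expiry together with a rotation deadline, and each pass in this log draws a different deadline. That is consistent with the deadline being a jittered point late in the certificate's lifetime rather than a fixed offset from expiry. A sketch of that kind of computation; the 70-90% window and the assumed issuance time are illustrative, not constants read from this log:

    package main

    import (
        "fmt"
        "math/rand"
        "time"
    )

    // rotationDeadline picks a jittered point in the later part of the
    // certificate's lifetime. The 70-90% window is an assumption.
    func rotationDeadline(notBefore, notAfter time.Time) time.Time {
        total := notAfter.Sub(notBefore)
        jitter := 0.7 + 0.2*rand.Float64()
        return notBefore.Add(time.Duration(jitter * float64(total)))
    }

    func main() {
        notAfter, _ := time.Parse(time.RFC3339, "2026-02-24T05:53:03Z")
        notBefore := notAfter.Add(-365 * 24 * time.Hour) // issuance time not in the log; assumed
        fmt.Println(rotationDeadline(notBefore, notAfter))
    }

Because every drawn deadline already lies in the past relative to the node clock, the manager keeps attempting rotation on each pass.
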
Has your network provider started?"} Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.927252 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.927300 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.927311 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.927329 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:43 crc kubenswrapper[4829]: I0122 00:08:43.927341 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:43Z","lastTransitionTime":"2026-01-22T00:08:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.029673 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.029712 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.029725 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.029745 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.029755 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:44Z","lastTransitionTime":"2026-01-22T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.132467 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.132504 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.132516 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.132531 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.132562 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:44Z","lastTransitionTime":"2026-01-22T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.235602 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.235665 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.235685 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.235711 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.235728 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:44Z","lastTransitionTime":"2026-01-22T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.338510 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.338605 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.338626 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.338693 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.338717 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:44Z","lastTransitionTime":"2026-01-22T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.441337 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.441398 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.441424 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.441453 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.441476 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:44Z","lastTransitionTime":"2026-01-22T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.543944 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.543978 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.543988 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.544002 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.544012 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:44Z","lastTransitionTime":"2026-01-22T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.553825 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.553927 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:44 crc kubenswrapper[4829]: E0122 00:08:44.554003 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:44 crc kubenswrapper[4829]: E0122 00:08:44.554162 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.629013 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/74beaade-c8f6-4d34-842b-1c03fe72b195-metrics-certs\") pod \"network-metrics-daemon-c82dd\" (UID: \"74beaade-c8f6-4d34-842b-1c03fe72b195\") " pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:44 crc kubenswrapper[4829]: E0122 00:08:44.629196 4829 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 00:08:44 crc kubenswrapper[4829]: E0122 00:08:44.629310 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/74beaade-c8f6-4d34-842b-1c03fe72b195-metrics-certs podName:74beaade-c8f6-4d34-842b-1c03fe72b195 nodeName:}" failed. 
No retries permitted until 2026-01-22 00:09:48.629283628 +0000 UTC m=+166.665525550 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/74beaade-c8f6-4d34-842b-1c03fe72b195-metrics-certs") pod "network-metrics-daemon-c82dd" (UID: "74beaade-c8f6-4d34-842b-1c03fe72b195") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.646597 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.646637 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.646651 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.646686 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.646714 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:44Z","lastTransitionTime":"2026-01-22T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.664191 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 20:58:08.798689166 +0000 UTC Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.749658 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.749719 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.749738 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.749761 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.749779 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:44Z","lastTransitionTime":"2026-01-22T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
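The nestedpendingoperations entry above schedules the next MountVolume attempt 1m4s out. That delay is consistent with an exponential backoff that doubles on each consecutive failure (0.5s doubled seven times is 64s); the initial delay and cap below are assumptions chosen to reproduce the observed value, not verified kubelet constants:

    package main

    import (
        "fmt"
        "time"
    )

    // backoffDelay doubles an initial delay per consecutive failure, up to a cap.
    // initial=500ms and maxDelay=2m2s are assumptions that happen to yield the
    // 1m4s seen in the log after seven straight failures.
    func backoffDelay(failures int, initial, maxDelay time.Duration) time.Duration {
        d := initial
        for i := 0; i < failures; i++ {
            d *= 2
            if d > maxDelay {
                return maxDelay
            }
        }
        return d
    }

    func main() {
        fmt.Println(backoffDelay(7, 500*time.Millisecond, 122*time.Second)) // 1m4s
    }

The underlying failure ("metrics-daemon-secret not registered") will keep recurring until the node's object cache can sync, so the retry interval keeps growing toward its cap.
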
Has your network provider started?"} Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.852557 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.852611 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.852625 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.852643 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.852657 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:44Z","lastTransitionTime":"2026-01-22T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.955628 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.955999 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.956153 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.956297 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:44 crc kubenswrapper[4829]: I0122 00:08:44.956442 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:44Z","lastTransitionTime":"2026-01-22T00:08:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.059157 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.059207 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.059220 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.059240 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.059252 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:45Z","lastTransitionTime":"2026-01-22T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.161903 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.161957 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.161970 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.161989 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.162002 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:45Z","lastTransitionTime":"2026-01-22T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.264361 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.264419 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.264433 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.264452 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.264467 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:45Z","lastTransitionTime":"2026-01-22T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.366376 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.366417 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.366426 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.366440 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.366449 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:45Z","lastTransitionTime":"2026-01-22T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.468818 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.468893 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.468915 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.468945 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.468970 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:45Z","lastTransitionTime":"2026-01-22T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.553130 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.553204 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:45 crc kubenswrapper[4829]: E0122 00:08:45.553257 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:45 crc kubenswrapper[4829]: E0122 00:08:45.553348 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.554248 4829 scope.go:117] "RemoveContainer" containerID="9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63" Jan 22 00:08:45 crc kubenswrapper[4829]: E0122 00:08:45.554415 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-fd6j8_openshift-ovn-kubernetes(7df1ca93-0e8f-4f06-8b8f-2297a8dbb340)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.571958 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.571996 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.572004 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.572017 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.572027 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:45Z","lastTransitionTime":"2026-01-22T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.664970 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 20:06:12.962939421 +0000 UTC Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.675637 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.675747 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.675789 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.675826 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.675850 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:45Z","lastTransitionTime":"2026-01-22T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.778570 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.779132 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.779144 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.779164 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.779177 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:45Z","lastTransitionTime":"2026-01-22T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.881785 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.881825 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.881834 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.881851 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.881865 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:45Z","lastTransitionTime":"2026-01-22T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.984899 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.984971 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.984988 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.985008 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:45 crc kubenswrapper[4829]: I0122 00:08:45.985020 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:45Z","lastTransitionTime":"2026-01-22T00:08:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.093141 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.093207 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.093226 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.093304 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.093661 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:46Z","lastTransitionTime":"2026-01-22T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.197588 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.197628 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.197639 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.197656 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.197668 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:46Z","lastTransitionTime":"2026-01-22T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.300115 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.300171 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.300182 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.300200 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.300211 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:46Z","lastTransitionTime":"2026-01-22T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.403673 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.403744 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.403766 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.403798 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.403821 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:46Z","lastTransitionTime":"2026-01-22T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.507415 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.507484 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.507502 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.507529 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.507572 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:46Z","lastTransitionTime":"2026-01-22T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.553259 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:46 crc kubenswrapper[4829]: E0122 00:08:46.553525 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.553667 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:46 crc kubenswrapper[4829]: E0122 00:08:46.553851 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.611096 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.611158 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.611179 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.611203 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.611223 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:46Z","lastTransitionTime":"2026-01-22T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.665970 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 09:32:06.531426151 +0000 UTC Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.714231 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.714307 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.714332 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.714359 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.714378 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:46Z","lastTransitionTime":"2026-01-22T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.817217 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.817266 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.817274 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.817289 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.817300 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:46Z","lastTransitionTime":"2026-01-22T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.919745 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.919817 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.919840 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.919870 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:46 crc kubenswrapper[4829]: I0122 00:08:46.919887 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:46Z","lastTransitionTime":"2026-01-22T00:08:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.022653 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.022719 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.022736 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.022795 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.022812 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:47Z","lastTransitionTime":"2026-01-22T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.125745 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.125821 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.125841 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.125865 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.125882 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:47Z","lastTransitionTime":"2026-01-22T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.228159 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.228213 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.228234 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.228307 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.228336 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:47Z","lastTransitionTime":"2026-01-22T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.330831 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.330908 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.330926 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.330951 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.330968 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:47Z","lastTransitionTime":"2026-01-22T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.434267 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.434333 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.434353 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.434379 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.434399 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:47Z","lastTransitionTime":"2026-01-22T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.536714 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.536749 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.536761 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.536777 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.536788 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:47Z","lastTransitionTime":"2026-01-22T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.553298 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.553405 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:47 crc kubenswrapper[4829]: E0122 00:08:47.553481 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:47 crc kubenswrapper[4829]: E0122 00:08:47.553646 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.639536 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.639653 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.639677 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.639709 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.639732 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:47Z","lastTransitionTime":"2026-01-22T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.666226 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 01:40:17.345696029 +0000 UTC Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.742031 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.742081 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.742093 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.742111 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.742123 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:47Z","lastTransitionTime":"2026-01-22T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.844969 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.845018 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.845031 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.845050 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.845064 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:47Z","lastTransitionTime":"2026-01-22T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.948027 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.948128 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.948168 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.948204 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:47 crc kubenswrapper[4829]: I0122 00:08:47.948226 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:47Z","lastTransitionTime":"2026-01-22T00:08:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.051518 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.051613 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.051638 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.051666 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.051689 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:48Z","lastTransitionTime":"2026-01-22T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.154425 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.154507 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.154533 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.154615 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.154643 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:48Z","lastTransitionTime":"2026-01-22T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.257398 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.257457 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.257474 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.257499 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.257519 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:48Z","lastTransitionTime":"2026-01-22T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.360384 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.360449 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.360466 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.360492 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.360512 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:48Z","lastTransitionTime":"2026-01-22T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.463238 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.463281 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.463291 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.463308 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.463317 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:48Z","lastTransitionTime":"2026-01-22T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.553380 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.553449 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:48 crc kubenswrapper[4829]: E0122 00:08:48.553673 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:48 crc kubenswrapper[4829]: E0122 00:08:48.553805 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.565631 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.565700 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.565724 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.565756 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.565780 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:48Z","lastTransitionTime":"2026-01-22T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.666613 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 14:42:13.566415955 +0000 UTC Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.677180 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.677227 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.677236 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.677258 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.677272 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:48Z","lastTransitionTime":"2026-01-22T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.779881 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.779945 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.779967 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.779998 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.780020 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:48Z","lastTransitionTime":"2026-01-22T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.883109 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.883173 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.883189 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.883216 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.883233 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:48Z","lastTransitionTime":"2026-01-22T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.988937 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.988995 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.989019 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.989045 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:48 crc kubenswrapper[4829]: I0122 00:08:48.989061 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:48Z","lastTransitionTime":"2026-01-22T00:08:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.092889 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.092949 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.092967 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.092993 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.093010 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:49Z","lastTransitionTime":"2026-01-22T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.196009 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.196083 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.196118 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.196154 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.196193 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:49Z","lastTransitionTime":"2026-01-22T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.299130 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.299201 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.299224 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.299255 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.299277 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:49Z","lastTransitionTime":"2026-01-22T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.401633 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.401665 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.401673 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.401688 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.401696 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:49Z","lastTransitionTime":"2026-01-22T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.503575 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.503648 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.503664 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.503686 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.503702 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:49Z","lastTransitionTime":"2026-01-22T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.553091 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.553168 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:49 crc kubenswrapper[4829]: E0122 00:08:49.553308 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:49 crc kubenswrapper[4829]: E0122 00:08:49.553424 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.606113 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.606168 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.606181 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.606200 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.606211 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:49Z","lastTransitionTime":"2026-01-22T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.667146 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 02:21:28.155489687 +0000 UTC Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.708397 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.708455 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.708474 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.708498 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.708515 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:49Z","lastTransitionTime":"2026-01-22T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.811151 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.811187 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.811198 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.811215 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.811227 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:49Z","lastTransitionTime":"2026-01-22T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.914083 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.914121 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.914132 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.914150 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:49 crc kubenswrapper[4829]: I0122 00:08:49.914162 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:49Z","lastTransitionTime":"2026-01-22T00:08:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.017420 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.017480 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.017497 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.017524 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.017584 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:50Z","lastTransitionTime":"2026-01-22T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.120099 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.120195 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.120218 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.120252 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.120275 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:50Z","lastTransitionTime":"2026-01-22T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.222933 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.222986 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.223015 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.223032 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.223044 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:50Z","lastTransitionTime":"2026-01-22T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.325575 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.325660 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.325682 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.325716 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.325747 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:50Z","lastTransitionTime":"2026-01-22T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.428482 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.428583 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.428601 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.428625 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.428646 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:50Z","lastTransitionTime":"2026-01-22T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.531643 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.531707 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.531723 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.531752 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.531771 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:50Z","lastTransitionTime":"2026-01-22T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.552828 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.552939 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:50 crc kubenswrapper[4829]: E0122 00:08:50.553002 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:50 crc kubenswrapper[4829]: E0122 00:08:50.553163 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.635657 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.635734 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.635762 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.635789 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.635813 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:50Z","lastTransitionTime":"2026-01-22T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.667477 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 19:26:51.437944503 +0000 UTC Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.738390 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.738443 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.738460 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.738486 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.738503 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:50Z","lastTransitionTime":"2026-01-22T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.841959 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.842014 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.842032 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.842055 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.842073 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:50Z","lastTransitionTime":"2026-01-22T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.944895 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.945036 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.945058 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.945084 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:50 crc kubenswrapper[4829]: I0122 00:08:50.945102 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:50Z","lastTransitionTime":"2026-01-22T00:08:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.048131 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.048170 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.048205 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.048222 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.048233 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:51Z","lastTransitionTime":"2026-01-22T00:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.151465 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.151527 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.151582 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.151615 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.151637 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:51Z","lastTransitionTime":"2026-01-22T00:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.254535 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.254621 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.254638 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.254663 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.254682 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:51Z","lastTransitionTime":"2026-01-22T00:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.357632 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.357691 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.357718 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.357740 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.357754 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:51Z","lastTransitionTime":"2026-01-22T00:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.460314 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.460359 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.460370 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.460388 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.460405 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:51Z","lastTransitionTime":"2026-01-22T00:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.553311 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.553390 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:51 crc kubenswrapper[4829]: E0122 00:08:51.553481 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:51 crc kubenswrapper[4829]: E0122 00:08:51.553608 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.563689 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.563757 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.563779 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.563808 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.563832 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:51Z","lastTransitionTime":"2026-01-22T00:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.666796 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.666888 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.666972 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.666996 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.667041 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:51Z","lastTransitionTime":"2026-01-22T00:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.668192 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 04:26:07.746970572 +0000 UTC Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.769761 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.769835 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.769850 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.769878 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.769894 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:51Z","lastTransitionTime":"2026-01-22T00:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.872059 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.872118 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.872132 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.872153 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.872169 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:51Z","lastTransitionTime":"2026-01-22T00:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.975526 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.975620 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.975638 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.975663 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:51 crc kubenswrapper[4829]: I0122 00:08:51.975680 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:51Z","lastTransitionTime":"2026-01-22T00:08:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.079013 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.079095 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.079128 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.079158 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.079181 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:52Z","lastTransitionTime":"2026-01-22T00:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.182472 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.182612 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.182631 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.182658 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.182675 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:52Z","lastTransitionTime":"2026-01-22T00:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.285944 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.286027 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.286053 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.286087 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.286114 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:52Z","lastTransitionTime":"2026-01-22T00:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.388972 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.389046 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.389064 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.389090 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.389110 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:52Z","lastTransitionTime":"2026-01-22T00:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.492431 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.492494 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.492509 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.492532 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.492567 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:52Z","lastTransitionTime":"2026-01-22T00:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.552793 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.552788 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:52 crc kubenswrapper[4829]: E0122 00:08:52.553052 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:52 crc kubenswrapper[4829]: E0122 00:08:52.553171 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.598974 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.599028 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.599039 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.599067 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.599080 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:52Z","lastTransitionTime":"2026-01-22T00:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.639017 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-4ss4n" podStartSLOduration=87.638995443 podStartE2EDuration="1m27.638995443s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:08:52.628903035 +0000 UTC m=+110.665144957" watchObservedRunningTime="2026-01-22 00:08:52.638995443 +0000 UTC m=+110.675237355" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.652037 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=20.652012423 podStartE2EDuration="20.652012423s" podCreationTimestamp="2026-01-22 00:08:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:08:52.639430007 +0000 UTC m=+110.675671939" watchObservedRunningTime="2026-01-22 00:08:52.652012423 +0000 UTC m=+110.688254335" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.653220 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=63.653212971 podStartE2EDuration="1m3.653212971s" podCreationTimestamp="2026-01-22 00:07:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:08:52.65160818 +0000 UTC m=+110.687850092" watchObservedRunningTime="2026-01-22 00:08:52.653212971 +0000 UTC m=+110.689454883" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.668475 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 02:37:02.311976389 +0000 UTC Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.689895 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podStartSLOduration=87.689861174 podStartE2EDuration="1m27.689861174s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:08:52.689062339 +0000 UTC m=+110.725304261" watchObservedRunningTime="2026-01-22 00:08:52.689861174 +0000 UTC m=+110.726103126" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.702422 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.702469 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.702486 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.702510 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.702527 4829 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:52Z","lastTransitionTime":"2026-01-22T00:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.711386 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=91.71135757 podStartE2EDuration="1m31.71135757s" podCreationTimestamp="2026-01-22 00:07:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:08:52.70847273 +0000 UTC m=+110.744714692" watchObservedRunningTime="2026-01-22 00:08:52.71135757 +0000 UTC m=+110.747599492" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.729147 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=88.72912151 podStartE2EDuration="1m28.72912151s" podCreationTimestamp="2026-01-22 00:07:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:08:52.728565802 +0000 UTC m=+110.764807744" watchObservedRunningTime="2026-01-22 00:08:52.72912151 +0000 UTC m=+110.765363432" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.756756 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-8d59c" podStartSLOduration=86.756733598 podStartE2EDuration="1m26.756733598s" podCreationTimestamp="2026-01-22 00:07:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:08:52.756014246 +0000 UTC m=+110.792256158" watchObservedRunningTime="2026-01-22 00:08:52.756733598 +0000 UTC m=+110.792975520" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.756984 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.757018 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.757028 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.757044 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.757055 4829 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T00:08:52Z","lastTransitionTime":"2026-01-22T00:08:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.808274 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-v62gj" podStartSLOduration=87.808258609 podStartE2EDuration="1m27.808258609s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:08:52.807099113 +0000 UTC m=+110.843341045" watchObservedRunningTime="2026-01-22 00:08:52.808258609 +0000 UTC m=+110.844500521" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.824424 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-82vbx"] Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.825192 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-82vbx" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.828829 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.829176 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-knbr4" podStartSLOduration=87.829159147 podStartE2EDuration="1m27.829159147s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:08:52.828823107 +0000 UTC m=+110.865065039" watchObservedRunningTime="2026-01-22 00:08:52.829159147 +0000 UTC m=+110.865401059" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.829243 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.829638 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.831800 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.882837 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=89.882807736 podStartE2EDuration="1m29.882807736s" podCreationTimestamp="2026-01-22 00:07:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:08:52.868663331 +0000 UTC m=+110.904905273" watchObservedRunningTime="2026-01-22 00:08:52.882807736 +0000 UTC m=+110.919049648" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.903741 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-vgv2h" podStartSLOduration=87.903711193 podStartE2EDuration="1m27.903711193s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:08:52.902222807 +0000 UTC m=+110.938464719" watchObservedRunningTime="2026-01-22 00:08:52.903711193 +0000 UTC m=+110.939953115" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.926077 4829 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/15a4060d-a312-43ef-9e85-a8d8ab576229-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-82vbx\" (UID: \"15a4060d-a312-43ef-9e85-a8d8ab576229\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-82vbx" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.926130 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/15a4060d-a312-43ef-9e85-a8d8ab576229-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-82vbx\" (UID: \"15a4060d-a312-43ef-9e85-a8d8ab576229\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-82vbx" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.926156 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/15a4060d-a312-43ef-9e85-a8d8ab576229-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-82vbx\" (UID: \"15a4060d-a312-43ef-9e85-a8d8ab576229\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-82vbx" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.926197 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/15a4060d-a312-43ef-9e85-a8d8ab576229-service-ca\") pod \"cluster-version-operator-5c965bbfc6-82vbx\" (UID: \"15a4060d-a312-43ef-9e85-a8d8ab576229\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-82vbx" Jan 22 00:08:52 crc kubenswrapper[4829]: I0122 00:08:52.926220 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/15a4060d-a312-43ef-9e85-a8d8ab576229-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-82vbx\" (UID: \"15a4060d-a312-43ef-9e85-a8d8ab576229\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-82vbx" Jan 22 00:08:53 crc kubenswrapper[4829]: I0122 00:08:53.027358 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/15a4060d-a312-43ef-9e85-a8d8ab576229-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-82vbx\" (UID: \"15a4060d-a312-43ef-9e85-a8d8ab576229\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-82vbx" Jan 22 00:08:53 crc kubenswrapper[4829]: I0122 00:08:53.027425 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/15a4060d-a312-43ef-9e85-a8d8ab576229-service-ca\") pod \"cluster-version-operator-5c965bbfc6-82vbx\" (UID: \"15a4060d-a312-43ef-9e85-a8d8ab576229\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-82vbx" Jan 22 00:08:53 crc kubenswrapper[4829]: I0122 00:08:53.027451 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/15a4060d-a312-43ef-9e85-a8d8ab576229-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-82vbx\" (UID: \"15a4060d-a312-43ef-9e85-a8d8ab576229\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-82vbx" Jan 22 00:08:53 crc kubenswrapper[4829]: I0122 00:08:53.027513 4829 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/15a4060d-a312-43ef-9e85-a8d8ab576229-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-82vbx\" (UID: \"15a4060d-a312-43ef-9e85-a8d8ab576229\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-82vbx" Jan 22 00:08:53 crc kubenswrapper[4829]: I0122 00:08:53.027572 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/15a4060d-a312-43ef-9e85-a8d8ab576229-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-82vbx\" (UID: \"15a4060d-a312-43ef-9e85-a8d8ab576229\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-82vbx" Jan 22 00:08:53 crc kubenswrapper[4829]: I0122 00:08:53.027671 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/15a4060d-a312-43ef-9e85-a8d8ab576229-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-82vbx\" (UID: \"15a4060d-a312-43ef-9e85-a8d8ab576229\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-82vbx" Jan 22 00:08:53 crc kubenswrapper[4829]: I0122 00:08:53.027685 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/15a4060d-a312-43ef-9e85-a8d8ab576229-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-82vbx\" (UID: \"15a4060d-a312-43ef-9e85-a8d8ab576229\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-82vbx" Jan 22 00:08:53 crc kubenswrapper[4829]: I0122 00:08:53.028301 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/15a4060d-a312-43ef-9e85-a8d8ab576229-service-ca\") pod \"cluster-version-operator-5c965bbfc6-82vbx\" (UID: \"15a4060d-a312-43ef-9e85-a8d8ab576229\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-82vbx" Jan 22 00:08:53 crc kubenswrapper[4829]: I0122 00:08:53.033110 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/15a4060d-a312-43ef-9e85-a8d8ab576229-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-82vbx\" (UID: \"15a4060d-a312-43ef-9e85-a8d8ab576229\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-82vbx" Jan 22 00:08:53 crc kubenswrapper[4829]: I0122 00:08:53.044634 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/15a4060d-a312-43ef-9e85-a8d8ab576229-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-82vbx\" (UID: \"15a4060d-a312-43ef-9e85-a8d8ab576229\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-82vbx" Jan 22 00:08:53 crc kubenswrapper[4829]: I0122 00:08:53.138735 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-82vbx" Jan 22 00:08:53 crc kubenswrapper[4829]: I0122 00:08:53.353359 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-82vbx" event={"ID":"15a4060d-a312-43ef-9e85-a8d8ab576229","Type":"ContainerStarted","Data":"8eaaad1462182ac44139e9a13de47ac9c441ec2837e7aa39b0abfa0d50200672"} Jan 22 00:08:53 crc kubenswrapper[4829]: I0122 00:08:53.353462 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-82vbx" event={"ID":"15a4060d-a312-43ef-9e85-a8d8ab576229","Type":"ContainerStarted","Data":"07b6deb5935ad702c5c0d4690f3859dd0e9fd9c402546a071ac32299c9e59ac1"} Jan 22 00:08:53 crc kubenswrapper[4829]: I0122 00:08:53.552845 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:53 crc kubenswrapper[4829]: I0122 00:08:53.552920 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:53 crc kubenswrapper[4829]: E0122 00:08:53.552998 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:53 crc kubenswrapper[4829]: E0122 00:08:53.553127 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:53 crc kubenswrapper[4829]: I0122 00:08:53.668733 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 22:23:25.949511937 +0000 UTC Jan 22 00:08:53 crc kubenswrapper[4829]: I0122 00:08:53.669045 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Jan 22 00:08:53 crc kubenswrapper[4829]: I0122 00:08:53.675842 4829 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 22 00:08:54 crc kubenswrapper[4829]: I0122 00:08:54.553010 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:54 crc kubenswrapper[4829]: E0122 00:08:54.553161 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:54 crc kubenswrapper[4829]: I0122 00:08:54.553014 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:54 crc kubenswrapper[4829]: E0122 00:08:54.553651 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:55 crc kubenswrapper[4829]: I0122 00:08:55.553362 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:55 crc kubenswrapper[4829]: I0122 00:08:55.553391 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:55 crc kubenswrapper[4829]: E0122 00:08:55.553494 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:55 crc kubenswrapper[4829]: E0122 00:08:55.553668 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:56 crc kubenswrapper[4829]: I0122 00:08:56.553619 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:56 crc kubenswrapper[4829]: I0122 00:08:56.553650 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:56 crc kubenswrapper[4829]: E0122 00:08:56.553857 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:56 crc kubenswrapper[4829]: E0122 00:08:56.554023 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:57 crc kubenswrapper[4829]: I0122 00:08:57.552727 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:57 crc kubenswrapper[4829]: I0122 00:08:57.552734 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:57 crc kubenswrapper[4829]: E0122 00:08:57.553203 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:57 crc kubenswrapper[4829]: E0122 00:08:57.553377 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:08:57 crc kubenswrapper[4829]: I0122 00:08:57.553595 4829 scope.go:117] "RemoveContainer" containerID="9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63" Jan 22 00:08:57 crc kubenswrapper[4829]: E0122 00:08:57.553775 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-fd6j8_openshift-ovn-kubernetes(7df1ca93-0e8f-4f06-8b8f-2297a8dbb340)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" Jan 22 00:08:58 crc kubenswrapper[4829]: I0122 00:08:58.552834 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:08:58 crc kubenswrapper[4829]: I0122 00:08:58.552933 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:08:58 crc kubenswrapper[4829]: E0122 00:08:58.553023 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:08:58 crc kubenswrapper[4829]: E0122 00:08:58.553182 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:08:59 crc kubenswrapper[4829]: I0122 00:08:59.552978 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:08:59 crc kubenswrapper[4829]: I0122 00:08:59.553065 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:08:59 crc kubenswrapper[4829]: E0122 00:08:59.553121 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:08:59 crc kubenswrapper[4829]: E0122 00:08:59.553220 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:09:00 crc kubenswrapper[4829]: I0122 00:09:00.376426 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4ss4n_60f879f6-8b21-4e75-9a62-d372fec048e1/kube-multus/1.log" Jan 22 00:09:00 crc kubenswrapper[4829]: I0122 00:09:00.377593 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4ss4n_60f879f6-8b21-4e75-9a62-d372fec048e1/kube-multus/0.log" Jan 22 00:09:00 crc kubenswrapper[4829]: I0122 00:09:00.377669 4829 generic.go:334] "Generic (PLEG): container finished" podID="60f879f6-8b21-4e75-9a62-d372fec048e1" containerID="83e98a4d32040f6ba77c2c30a1833ff63b851a4aac473f7367192aa2fdc68a4c" exitCode=1 Jan 22 00:09:00 crc kubenswrapper[4829]: I0122 00:09:00.377713 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4ss4n" event={"ID":"60f879f6-8b21-4e75-9a62-d372fec048e1","Type":"ContainerDied","Data":"83e98a4d32040f6ba77c2c30a1833ff63b851a4aac473f7367192aa2fdc68a4c"} Jan 22 00:09:00 crc kubenswrapper[4829]: I0122 00:09:00.377760 4829 scope.go:117] "RemoveContainer" containerID="4065dc832a7c683a07cf574e68fe26d13b88a4d0bf30d22810e058a5451f048f" Jan 22 00:09:00 crc kubenswrapper[4829]: I0122 00:09:00.378366 4829 scope.go:117] "RemoveContainer" containerID="83e98a4d32040f6ba77c2c30a1833ff63b851a4aac473f7367192aa2fdc68a4c" Jan 22 00:09:00 crc kubenswrapper[4829]: E0122 00:09:00.378730 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-4ss4n_openshift-multus(60f879f6-8b21-4e75-9a62-d372fec048e1)\"" pod="openshift-multus/multus-4ss4n" podUID="60f879f6-8b21-4e75-9a62-d372fec048e1" Jan 22 00:09:00 crc kubenswrapper[4829]: I0122 00:09:00.403859 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-82vbx" podStartSLOduration=95.403831891 podStartE2EDuration="1m35.403831891s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:08:53.374028083 +0000 UTC m=+111.410270005" watchObservedRunningTime="2026-01-22 00:09:00.403831891 +0000 UTC m=+118.440073803" Jan 22 00:09:00 crc kubenswrapper[4829]: I0122 00:09:00.553053 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:09:00 crc kubenswrapper[4829]: I0122 00:09:00.553131 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:09:00 crc kubenswrapper[4829]: E0122 00:09:00.553250 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:09:00 crc kubenswrapper[4829]: E0122 00:09:00.562636 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:09:01 crc kubenswrapper[4829]: I0122 00:09:01.383697 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4ss4n_60f879f6-8b21-4e75-9a62-d372fec048e1/kube-multus/1.log" Jan 22 00:09:01 crc kubenswrapper[4829]: I0122 00:09:01.553455 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:09:01 crc kubenswrapper[4829]: I0122 00:09:01.553462 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:09:01 crc kubenswrapper[4829]: E0122 00:09:01.553753 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:09:01 crc kubenswrapper[4829]: E0122 00:09:01.553791 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:09:02 crc kubenswrapper[4829]: E0122 00:09:02.483126 4829 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Jan 22 00:09:02 crc kubenswrapper[4829]: I0122 00:09:02.552940 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:09:02 crc kubenswrapper[4829]: I0122 00:09:02.553011 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:09:02 crc kubenswrapper[4829]: E0122 00:09:02.554437 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:09:02 crc kubenswrapper[4829]: E0122 00:09:02.554570 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:09:02 crc kubenswrapper[4829]: E0122 00:09:02.673843 4829 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 22 00:09:03 crc kubenswrapper[4829]: I0122 00:09:03.553253 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:09:03 crc kubenswrapper[4829]: I0122 00:09:03.553253 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:09:03 crc kubenswrapper[4829]: E0122 00:09:03.553454 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:09:03 crc kubenswrapper[4829]: E0122 00:09:03.553647 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:09:04 crc kubenswrapper[4829]: I0122 00:09:04.552864 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:09:04 crc kubenswrapper[4829]: E0122 00:09:04.553149 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:09:04 crc kubenswrapper[4829]: I0122 00:09:04.553457 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:09:04 crc kubenswrapper[4829]: E0122 00:09:04.553633 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:09:05 crc kubenswrapper[4829]: I0122 00:09:05.552721 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:09:05 crc kubenswrapper[4829]: I0122 00:09:05.552749 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:09:05 crc kubenswrapper[4829]: E0122 00:09:05.552938 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:09:05 crc kubenswrapper[4829]: E0122 00:09:05.553095 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:09:06 crc kubenswrapper[4829]: I0122 00:09:06.552799 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:09:06 crc kubenswrapper[4829]: E0122 00:09:06.552980 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:09:06 crc kubenswrapper[4829]: I0122 00:09:06.553241 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:09:06 crc kubenswrapper[4829]: E0122 00:09:06.553685 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:09:07 crc kubenswrapper[4829]: I0122 00:09:07.553248 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:09:07 crc kubenswrapper[4829]: I0122 00:09:07.553319 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:09:07 crc kubenswrapper[4829]: E0122 00:09:07.553384 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:09:07 crc kubenswrapper[4829]: E0122 00:09:07.553473 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:09:07 crc kubenswrapper[4829]: E0122 00:09:07.674849 4829 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 22 00:09:08 crc kubenswrapper[4829]: I0122 00:09:08.553304 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:09:08 crc kubenswrapper[4829]: I0122 00:09:08.553357 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:09:08 crc kubenswrapper[4829]: E0122 00:09:08.553504 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:09:08 crc kubenswrapper[4829]: E0122 00:09:08.553659 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:09:08 crc kubenswrapper[4829]: I0122 00:09:08.555948 4829 scope.go:117] "RemoveContainer" containerID="9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63" Jan 22 00:09:08 crc kubenswrapper[4829]: E0122 00:09:08.556480 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-fd6j8_openshift-ovn-kubernetes(7df1ca93-0e8f-4f06-8b8f-2297a8dbb340)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" Jan 22 00:09:09 crc kubenswrapper[4829]: I0122 00:09:09.553337 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:09:09 crc kubenswrapper[4829]: E0122 00:09:09.553642 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:09:09 crc kubenswrapper[4829]: I0122 00:09:09.553360 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:09:09 crc kubenswrapper[4829]: E0122 00:09:09.554049 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:09:10 crc kubenswrapper[4829]: I0122 00:09:10.552794 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:09:10 crc kubenswrapper[4829]: I0122 00:09:10.552856 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:09:10 crc kubenswrapper[4829]: E0122 00:09:10.553018 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:09:10 crc kubenswrapper[4829]: E0122 00:09:10.553142 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:09:11 crc kubenswrapper[4829]: I0122 00:09:11.553246 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:09:11 crc kubenswrapper[4829]: I0122 00:09:11.553373 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:09:11 crc kubenswrapper[4829]: E0122 00:09:11.553454 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:09:11 crc kubenswrapper[4829]: I0122 00:09:11.553628 4829 scope.go:117] "RemoveContainer" containerID="83e98a4d32040f6ba77c2c30a1833ff63b851a4aac473f7367192aa2fdc68a4c" Jan 22 00:09:11 crc kubenswrapper[4829]: E0122 00:09:11.553690 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:09:12 crc kubenswrapper[4829]: I0122 00:09:12.427249 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4ss4n_60f879f6-8b21-4e75-9a62-d372fec048e1/kube-multus/1.log" Jan 22 00:09:12 crc kubenswrapper[4829]: I0122 00:09:12.427680 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4ss4n" event={"ID":"60f879f6-8b21-4e75-9a62-d372fec048e1","Type":"ContainerStarted","Data":"12d5f08bdea530d824af56e3874c32cf12d50fe29bbc262f27f839089044880d"} Jan 22 00:09:12 crc kubenswrapper[4829]: I0122 00:09:12.553386 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:09:12 crc kubenswrapper[4829]: I0122 00:09:12.553410 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:09:12 crc kubenswrapper[4829]: E0122 00:09:12.555045 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:09:12 crc kubenswrapper[4829]: E0122 00:09:12.555145 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:09:12 crc kubenswrapper[4829]: E0122 00:09:12.676288 4829 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 22 00:09:13 crc kubenswrapper[4829]: I0122 00:09:13.553343 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:09:13 crc kubenswrapper[4829]: I0122 00:09:13.553343 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:09:13 crc kubenswrapper[4829]: E0122 00:09:13.553649 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:09:13 crc kubenswrapper[4829]: E0122 00:09:13.553709 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:09:14 crc kubenswrapper[4829]: I0122 00:09:14.553230 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:09:14 crc kubenswrapper[4829]: I0122 00:09:14.553308 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:09:14 crc kubenswrapper[4829]: E0122 00:09:14.553482 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:09:14 crc kubenswrapper[4829]: E0122 00:09:14.553695 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:09:15 crc kubenswrapper[4829]: I0122 00:09:15.552752 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:09:15 crc kubenswrapper[4829]: I0122 00:09:15.552753 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:09:15 crc kubenswrapper[4829]: E0122 00:09:15.552945 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:09:15 crc kubenswrapper[4829]: E0122 00:09:15.553067 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:09:16 crc kubenswrapper[4829]: I0122 00:09:16.552511 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:09:16 crc kubenswrapper[4829]: E0122 00:09:16.552732 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:09:16 crc kubenswrapper[4829]: I0122 00:09:16.552889 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:09:16 crc kubenswrapper[4829]: E0122 00:09:16.553015 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:09:17 crc kubenswrapper[4829]: I0122 00:09:17.553639 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:09:17 crc kubenswrapper[4829]: E0122 00:09:17.553792 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:09:17 crc kubenswrapper[4829]: I0122 00:09:17.553898 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:09:17 crc kubenswrapper[4829]: E0122 00:09:17.554100 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:09:17 crc kubenswrapper[4829]: E0122 00:09:17.677461 4829 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 22 00:09:18 crc kubenswrapper[4829]: I0122 00:09:18.553231 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:09:18 crc kubenswrapper[4829]: I0122 00:09:18.553300 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:09:18 crc kubenswrapper[4829]: E0122 00:09:18.553430 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:09:18 crc kubenswrapper[4829]: E0122 00:09:18.553514 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:09:19 crc kubenswrapper[4829]: I0122 00:09:19.553447 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:09:19 crc kubenswrapper[4829]: I0122 00:09:19.553537 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:09:19 crc kubenswrapper[4829]: E0122 00:09:19.553677 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:09:19 crc kubenswrapper[4829]: E0122 00:09:19.553814 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:09:20 crc kubenswrapper[4829]: I0122 00:09:20.553432 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:09:20 crc kubenswrapper[4829]: I0122 00:09:20.553476 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:09:20 crc kubenswrapper[4829]: E0122 00:09:20.553690 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:09:20 crc kubenswrapper[4829]: E0122 00:09:20.553881 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:09:21 crc kubenswrapper[4829]: I0122 00:09:21.553277 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:09:21 crc kubenswrapper[4829]: I0122 00:09:21.553304 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:09:21 crc kubenswrapper[4829]: E0122 00:09:21.553517 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:09:21 crc kubenswrapper[4829]: E0122 00:09:21.553636 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:09:22 crc kubenswrapper[4829]: I0122 00:09:22.553035 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:09:22 crc kubenswrapper[4829]: I0122 00:09:22.553117 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:09:22 crc kubenswrapper[4829]: E0122 00:09:22.554940 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:09:22 crc kubenswrapper[4829]: E0122 00:09:22.555115 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:09:22 crc kubenswrapper[4829]: E0122 00:09:22.678513 4829 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 22 00:09:23 crc kubenswrapper[4829]: I0122 00:09:23.553605 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:09:23 crc kubenswrapper[4829]: I0122 00:09:23.553664 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:09:23 crc kubenswrapper[4829]: E0122 00:09:23.553802 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:09:23 crc kubenswrapper[4829]: E0122 00:09:23.553926 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:09:23 crc kubenswrapper[4829]: I0122 00:09:23.554707 4829 scope.go:117] "RemoveContainer" containerID="9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63" Jan 22 00:09:24 crc kubenswrapper[4829]: I0122 00:09:24.553285 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:09:24 crc kubenswrapper[4829]: E0122 00:09:24.553447 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:09:24 crc kubenswrapper[4829]: I0122 00:09:24.553500 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:09:24 crc kubenswrapper[4829]: E0122 00:09:24.553629 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:09:25 crc kubenswrapper[4829]: I0122 00:09:25.478395 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fd6j8_7df1ca93-0e8f-4f06-8b8f-2297a8dbb340/ovnkube-controller/3.log" Jan 22 00:09:25 crc kubenswrapper[4829]: I0122 00:09:25.480821 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" event={"ID":"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340","Type":"ContainerStarted","Data":"55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183"} Jan 22 00:09:25 crc kubenswrapper[4829]: I0122 00:09:25.482056 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:09:25 crc kubenswrapper[4829]: I0122 00:09:25.508124 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" podStartSLOduration=120.508099608 podStartE2EDuration="2m0.508099608s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:25.506434726 +0000 UTC m=+143.542676658" watchObservedRunningTime="2026-01-22 00:09:25.508099608 +0000 UTC m=+143.544341560" Jan 22 00:09:25 crc kubenswrapper[4829]: I0122 00:09:25.552835 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:09:25 crc kubenswrapper[4829]: I0122 00:09:25.552874 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:09:25 crc kubenswrapper[4829]: E0122 00:09:25.552983 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:09:25 crc kubenswrapper[4829]: E0122 00:09:25.553175 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:09:25 crc kubenswrapper[4829]: I0122 00:09:25.594290 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-c82dd"] Jan 22 00:09:26 crc kubenswrapper[4829]: I0122 00:09:26.487045 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:09:26 crc kubenswrapper[4829]: E0122 00:09:26.487261 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-c82dd" podUID="74beaade-c8f6-4d34-842b-1c03fe72b195" Jan 22 00:09:26 crc kubenswrapper[4829]: I0122 00:09:26.553370 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:09:26 crc kubenswrapper[4829]: I0122 00:09:26.553442 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:09:26 crc kubenswrapper[4829]: E0122 00:09:26.553508 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 00:09:26 crc kubenswrapper[4829]: E0122 00:09:26.553636 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:09:27 crc kubenswrapper[4829]: I0122 00:09:27.553217 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:09:27 crc kubenswrapper[4829]: E0122 00:09:27.553350 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:09:28 crc kubenswrapper[4829]: I0122 00:09:28.528121 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:28 crc kubenswrapper[4829]: I0122 00:09:28.528238 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:09:28 crc kubenswrapper[4829]: E0122 00:09:28.528294 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:11:30.528260978 +0000 UTC m=+268.564502900 (durationBeforeRetry 2m2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:28 crc kubenswrapper[4829]: E0122 00:09:28.528348 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 00:09:28 crc kubenswrapper[4829]: E0122 00:09:28.528365 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 00:09:28 crc kubenswrapper[4829]: I0122 00:09:28.528359 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:09:28 crc kubenswrapper[4829]: E0122 00:09:28.528378 4829 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:09:28 crc kubenswrapper[4829]: E0122 00:09:28.528422 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 00:11:30.528408663 +0000 UTC m=+268.564650575 (durationBeforeRetry 2m2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:09:28 crc kubenswrapper[4829]: E0122 00:09:28.528588 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 00:09:28 crc kubenswrapper[4829]: E0122 00:09:28.528628 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 00:09:28 crc kubenswrapper[4829]: E0122 00:09:28.528642 4829 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:09:28 crc kubenswrapper[4829]: E0122 00:09:28.528702 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 00:11:30.528682552 +0000 UTC m=+268.564924544 (durationBeforeRetry 2m2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 00:09:28 crc kubenswrapper[4829]: I0122 00:09:28.553239 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:09:28 crc kubenswrapper[4829]: I0122 00:09:28.553333 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:09:28 crc kubenswrapper[4829]: I0122 00:09:28.553264 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:09:28 crc kubenswrapper[4829]: I0122 00:09:28.556476 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 22 00:09:28 crc kubenswrapper[4829]: I0122 00:09:28.556902 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 22 00:09:28 crc kubenswrapper[4829]: I0122 00:09:28.556917 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 22 00:09:28 crc kubenswrapper[4829]: I0122 00:09:28.557207 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 22 00:09:28 crc kubenswrapper[4829]: I0122 00:09:28.558108 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 22 00:09:28 crc kubenswrapper[4829]: I0122 00:09:28.559814 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 22 00:09:28 crc kubenswrapper[4829]: I0122 00:09:28.629866 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:09:28 crc kubenswrapper[4829]: I0122 00:09:28.629904 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:09:28 crc kubenswrapper[4829]: I0122 00:09:28.634984 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:09:28 crc kubenswrapper[4829]: I0122 00:09:28.832174 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:09:28 crc kubenswrapper[4829]: I0122 00:09:28.898934 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 00:09:29 crc kubenswrapper[4829]: I0122 00:09:29.496650 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"c1a6771ac09f19e3122922cfa574aa55e0da4c737bb36baec7f1baa78d8aab62"} Jan 22 00:09:29 crc kubenswrapper[4829]: I0122 00:09:29.497606 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"d24975f7a2eea975cb2be420050bc43ec085f216fbfadb672c8cb83692fab133"} Jan 22 00:09:29 crc kubenswrapper[4829]: I0122 00:09:29.553063 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.780011 4829 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.848469 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-gwk42"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.849440 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.852902 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.853291 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.853627 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.868965 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.869230 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.869765 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.869806 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.872113 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.873602 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-dj87x"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.875626 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.875823 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.882610 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-dj69l"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.883468 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.883744 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-dj87x" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.884023 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dj69l" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.886010 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.886385 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-q9d5h"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.886931 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.891196 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jqdzd"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.891881 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-8tq29"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.892188 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.892342 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.892400 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jqdzd" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.892587 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.892654 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.892810 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.892354 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-8tq29" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.893006 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.893025 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.893163 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.893303 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.893380 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.894947 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.895008 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-wgx7d"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.895762 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-wgx7d" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.896652 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.897152 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.897312 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-pruner-29484000-7gpqp"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.897800 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-pruner-29484000-7gpqp" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.898319 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7vqjh"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.898683 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7vqjh" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.900377 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.901355 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vh68c"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.901406 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.901616 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.901809 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.901838 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-c47sd"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.901911 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.902053 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.902177 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.902273 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.902372 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.902415 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-c47sd" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.902418 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-26xkj"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.902469 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.902596 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.903047 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.903182 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.903482 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.904155 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.904428 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.904640 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.904749 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.906421 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.906528 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.906641 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.906787 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.906889 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.907132 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.907181 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-nxg4z"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.906591 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.910150 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vh68c" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.911737 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-v546c"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.917685 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-sg4kz"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.918119 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sg4kz" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.918428 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-nxg4z" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.918619 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-v546c" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.921784 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.921862 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/2e96a180-475f-4cdc-a89e-e0e61dbcbe53-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-8dsgz\" (UID: \"2e96a180-475f-4cdc-a89e-e0e61dbcbe53\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.921892 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2e96a180-475f-4cdc-a89e-e0e61dbcbe53-audit-dir\") pod \"apiserver-7bbb656c7d-8dsgz\" (UID: \"2e96a180-475f-4cdc-a89e-e0e61dbcbe53\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.921927 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc463c00-fc43-4cdc-8ff0-a5026aba9539-config\") pod \"route-controller-manager-6576b87f9c-rknpp\" (UID: \"fc463c00-fc43-4cdc-8ff0-a5026aba9539\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.921948 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.921972 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.921991 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2e96a180-475f-4cdc-a89e-e0e61dbcbe53-audit-policies\") pod \"apiserver-7bbb656c7d-8dsgz\" (UID: \"2e96a180-475f-4cdc-a89e-e0e61dbcbe53\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.922022 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2e96a180-475f-4cdc-a89e-e0e61dbcbe53-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-8dsgz\" (UID: \"2e96a180-475f-4cdc-a89e-e0e61dbcbe53\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.922048 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/2e96a180-475f-4cdc-a89e-e0e61dbcbe53-encryption-config\") pod \"apiserver-7bbb656c7d-8dsgz\" (UID: \"2e96a180-475f-4cdc-a89e-e0e61dbcbe53\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.922108 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7n75\" (UniqueName: \"kubernetes.io/projected/fc463c00-fc43-4cdc-8ff0-a5026aba9539-kube-api-access-j7n75\") pod \"route-controller-manager-6576b87f9c-rknpp\" (UID: \"fc463c00-fc43-4cdc-8ff0-a5026aba9539\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.922150 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffe3f4a6-a78a-4a5d-bbdd-4bda32cf8d41-config\") pod \"machine-approver-56656f9798-dj69l\" (UID: \"ffe3f4a6-a78a-4a5d-bbdd-4bda32cf8d41\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dj69l" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.922175 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.922203 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72778ee0-ec95-4ab0-867c-1997b47449f5-config\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.922225 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/72778ee0-ec95-4ab0-867c-1997b47449f5-node-pullsecrets\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.922266 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbmp5\" (UniqueName: \"kubernetes.io/projected/7f619c30-40fb-46a4-956e-366f2192703e-kube-api-access-kbmp5\") pod \"openshift-config-operator-7777fb866f-c47sd\" (UID: \"7f619c30-40fb-46a4-956e-366f2192703e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-c47sd" 
Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.922304 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0e60009d-5985-47f9-b164-32cf604c23fa-client-ca\") pod \"controller-manager-879f6c89f-q9d5h\" (UID: \"0e60009d-5985-47f9-b164-32cf604c23fa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.922428 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.922470 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d2b61594-3e3c-4445-9308-436bd10952c0-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-wgx7d\" (UID: \"d2b61594-3e3c-4445-9308-436bd10952c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wgx7d" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.922495 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xb4gv\" (UniqueName: \"kubernetes.io/projected/fb1dda78-6284-441f-9239-1e9f81282032-kube-api-access-xb4gv\") pod \"image-pruner-29484000-7gpqp\" (UID: \"fb1dda78-6284-441f-9239-1e9f81282032\") " pod="openshift-image-registry/image-pruner-29484000-7gpqp" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.922524 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfcwq\" (UniqueName: \"kubernetes.io/projected/72778ee0-ec95-4ab0-867c-1997b47449f5-kube-api-access-lfcwq\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.922608 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e60009d-5985-47f9-b164-32cf604c23fa-config\") pod \"controller-manager-879f6c89f-q9d5h\" (UID: \"0e60009d-5985-47f9-b164-32cf604c23fa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.922645 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/2ef479a2-183c-40f8-8bd7-e974de5a5305-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-jqdzd\" (UID: \"2ef479a2-183c-40f8-8bd7-e974de5a5305\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jqdzd" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.922678 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/38a9a23b-efd4-452d-b4ee-8e5205b59cd5-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-7vqjh\" (UID: \"38a9a23b-efd4-452d-b4ee-8e5205b59cd5\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7vqjh" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.922707 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/72778ee0-ec95-4ab0-867c-1997b47449f5-etcd-client\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.922725 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njjvv\" (UniqueName: \"kubernetes.io/projected/0e60009d-5985-47f9-b164-32cf604c23fa-kube-api-access-njjvv\") pod \"controller-manager-879f6c89f-q9d5h\" (UID: \"0e60009d-5985-47f9-b164-32cf604c23fa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.922754 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.922780 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/38a9a23b-efd4-452d-b4ee-8e5205b59cd5-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-7vqjh\" (UID: \"38a9a23b-efd4-452d-b4ee-8e5205b59cd5\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7vqjh" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.922885 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.922911 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1166d09-4d81-47db-803f-316f64bac8a7-config\") pod \"openshift-apiserver-operator-796bbdcf4f-vh68c\" (UID: \"d1166d09-4d81-47db-803f-316f64bac8a7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vh68c" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.922943 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/72778ee0-ec95-4ab0-867c-1997b47449f5-image-import-ca\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.922962 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/7f619c30-40fb-46a4-956e-366f2192703e-available-featuregates\") pod 
\"openshift-config-operator-7777fb866f-c47sd\" (UID: \"7f619c30-40fb-46a4-956e-366f2192703e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-c47sd" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.922983 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2e96a180-475f-4cdc-a89e-e0e61dbcbe53-etcd-client\") pod \"apiserver-7bbb656c7d-8dsgz\" (UID: \"2e96a180-475f-4cdc-a89e-e0e61dbcbe53\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.923053 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.923111 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/fb1dda78-6284-441f-9239-1e9f81282032-serviceca\") pod \"image-pruner-29484000-7gpqp\" (UID: \"fb1dda78-6284-441f-9239-1e9f81282032\") " pod="openshift-image-registry/image-pruner-29484000-7gpqp" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.923136 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/72778ee0-ec95-4ab0-867c-1997b47449f5-audit-dir\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.923150 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fc463c00-fc43-4cdc-8ff0-a5026aba9539-client-ca\") pod \"route-controller-manager-6576b87f9c-rknpp\" (UID: \"fc463c00-fc43-4cdc-8ff0-a5026aba9539\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.923165 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fc463c00-fc43-4cdc-8ff0-a5026aba9539-serving-cert\") pod \"route-controller-manager-6576b87f9c-rknpp\" (UID: \"fc463c00-fc43-4cdc-8ff0-a5026aba9539\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.923187 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/ffe3f4a6-a78a-4a5d-bbdd-4bda32cf8d41-machine-approver-tls\") pod \"machine-approver-56656f9798-dj69l\" (UID: \"ffe3f4a6-a78a-4a5d-bbdd-4bda32cf8d41\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dj69l" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.923244 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2594295b-8073-45b0-8006-f7276c580e6d-serving-cert\") pod \"console-operator-58897d9998-dj87x\" (UID: \"2594295b-8073-45b0-8006-f7276c580e6d\") " pod="openshift-console-operator/console-operator-58897d9998-dj87x" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.923264 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/2e96a180-475f-4cdc-a89e-e0e61dbcbe53-serving-cert\") pod \"apiserver-7bbb656c7d-8dsgz\" (UID: \"2e96a180-475f-4cdc-a89e-e0e61dbcbe53\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.923288 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2594295b-8073-45b0-8006-f7276c580e6d-config\") pod \"console-operator-58897d9998-dj87x\" (UID: \"2594295b-8073-45b0-8006-f7276c580e6d\") " pod="openshift-console-operator/console-operator-58897d9998-dj87x" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.923313 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mt8r8\" (UniqueName: \"kubernetes.io/projected/d1166d09-4d81-47db-803f-316f64bac8a7-kube-api-access-mt8r8\") pod \"openshift-apiserver-operator-796bbdcf4f-vh68c\" (UID: \"d1166d09-4d81-47db-803f-316f64bac8a7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vh68c" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.923361 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2b61594-3e3c-4445-9308-436bd10952c0-serving-cert\") pod \"authentication-operator-69f744f599-wgx7d\" (UID: \"d2b61594-3e3c-4445-9308-436bd10952c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wgx7d" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.923390 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.923381 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.923424 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d2b61594-3e3c-4445-9308-436bd10952c0-service-ca-bundle\") pod \"authentication-operator-69f744f599-wgx7d\" (UID: \"d2b61594-3e3c-4445-9308-436bd10952c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wgx7d" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.923443 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlq6g\" (UniqueName: \"kubernetes.io/projected/de89451c-cbc7-401d-ab19-f4ea8916fcb5-kube-api-access-hlq6g\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.923526 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvb9f\" (UniqueName: \"kubernetes.io/projected/9447fd29-4eae-4299-b266-1f5236931aee-kube-api-access-bvb9f\") pod \"downloads-7954f5f757-8tq29\" (UID: \"9447fd29-4eae-4299-b266-1f5236931aee\") " 
pod="openshift-console/downloads-7954f5f757-8tq29" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.925092 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.925274 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.929361 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.929872 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.930119 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.930651 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.931940 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.932158 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.932356 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.932573 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.932884 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.933133 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.933321 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.933577 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.933992 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.934171 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.934512 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.934951 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.935009 4829 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.935827 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-n7mhk"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.937739 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-n7mhk" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.937956 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f619c30-40fb-46a4-956e-366f2192703e-serving-cert\") pod \"openshift-config-operator-7777fb866f-c47sd\" (UID: \"7f619c30-40fb-46a4-956e-366f2192703e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-c47sd" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.938014 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.938082 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/72778ee0-ec95-4ab0-867c-1997b47449f5-serving-cert\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.938114 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0e60009d-5985-47f9-b164-32cf604c23fa-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-q9d5h\" (UID: \"0e60009d-5985-47f9-b164-32cf604c23fa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.938172 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/72778ee0-ec95-4ab0-867c-1997b47449f5-etcd-serving-ca\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.938208 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7r99\" (UniqueName: \"kubernetes.io/projected/2ef479a2-183c-40f8-8bd7-e974de5a5305-kube-api-access-m7r99\") pod \"cluster-samples-operator-665b6dd947-jqdzd\" (UID: \"2ef479a2-183c-40f8-8bd7-e974de5a5305\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jqdzd" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.938254 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2594295b-8073-45b0-8006-f7276c580e6d-trusted-ca\") pod \"console-operator-58897d9998-dj87x\" (UID: \"2594295b-8073-45b0-8006-f7276c580e6d\") " 
pod="openshift-console-operator/console-operator-58897d9998-dj87x" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.938285 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/38a9a23b-efd4-452d-b4ee-8e5205b59cd5-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-7vqjh\" (UID: \"38a9a23b-efd4-452d-b4ee-8e5205b59cd5\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7vqjh" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.938316 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwph9\" (UniqueName: \"kubernetes.io/projected/38a9a23b-efd4-452d-b4ee-8e5205b59cd5-kube-api-access-hwph9\") pod \"cluster-image-registry-operator-dc59b4c8b-7vqjh\" (UID: \"38a9a23b-efd4-452d-b4ee-8e5205b59cd5\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7vqjh" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.938344 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nf9rw\" (UniqueName: \"kubernetes.io/projected/d2b61594-3e3c-4445-9308-436bd10952c0-kube-api-access-nf9rw\") pod \"authentication-operator-69f744f599-wgx7d\" (UID: \"d2b61594-3e3c-4445-9308-436bd10952c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wgx7d" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.938384 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/72778ee0-ec95-4ab0-867c-1997b47449f5-trusted-ca-bundle\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.938446 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4g8h8\" (UniqueName: \"kubernetes.io/projected/2e96a180-475f-4cdc-a89e-e0e61dbcbe53-kube-api-access-4g8h8\") pod \"apiserver-7bbb656c7d-8dsgz\" (UID: \"2e96a180-475f-4cdc-a89e-e0e61dbcbe53\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.938484 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ffe3f4a6-a78a-4a5d-bbdd-4bda32cf8d41-auth-proxy-config\") pod \"machine-approver-56656f9798-dj69l\" (UID: \"ffe3f4a6-a78a-4a5d-bbdd-4bda32cf8d41\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dj69l" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.938519 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e60009d-5985-47f9-b164-32cf604c23fa-serving-cert\") pod \"controller-manager-879f6c89f-q9d5h\" (UID: \"0e60009d-5985-47f9-b164-32cf604c23fa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.938562 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/de89451c-cbc7-401d-ab19-f4ea8916fcb5-audit-policies\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: 
\"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.938594 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.938665 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-779kf\" (UniqueName: \"kubernetes.io/projected/2594295b-8073-45b0-8006-f7276c580e6d-kube-api-access-779kf\") pod \"console-operator-58897d9998-dj87x\" (UID: \"2594295b-8073-45b0-8006-f7276c580e6d\") " pod="openshift-console-operator/console-operator-58897d9998-dj87x" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.938713 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/72778ee0-ec95-4ab0-867c-1997b47449f5-encryption-config\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.938741 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/de89451c-cbc7-401d-ab19-f4ea8916fcb5-audit-dir\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.938745 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"serviceca" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.938773 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.938806 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1166d09-4d81-47db-803f-316f64bac8a7-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-vh68c\" (UID: \"d1166d09-4d81-47db-803f-316f64bac8a7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vh68c" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.938844 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/72778ee0-ec95-4ab0-867c-1997b47449f5-audit\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.941034 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8b7bb\" (UniqueName: 
\"kubernetes.io/projected/ffe3f4a6-a78a-4a5d-bbdd-4bda32cf8d41-kube-api-access-8b7bb\") pod \"machine-approver-56656f9798-dj69l\" (UID: \"ffe3f4a6-a78a-4a5d-bbdd-4bda32cf8d41\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dj69l" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.941098 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2b61594-3e3c-4445-9308-436bd10952c0-config\") pod \"authentication-operator-69f744f599-wgx7d\" (UID: \"d2b61594-3e3c-4445-9308-436bd10952c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wgx7d" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.941154 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-2fdtt"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.942721 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-2fdtt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.942724 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-z9x4d"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.963359 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-z9x4d" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.963932 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.964153 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.965424 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vcq2q"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.965974 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vcq2q" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.966160 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2glr6"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.966776 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.969039 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2glr6" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.970001 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"pruner-dockercfg-p7bcw" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.970073 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.970146 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.970398 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-q9s2c"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.970488 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.970655 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.970722 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.970839 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.970990 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.971225 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.971335 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.971873 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-6ltv9"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.972253 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6bgqw"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.972265 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.972411 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.972505 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.972655 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-q9s2c" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.972688 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6bgqw" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.972885 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.976106 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.977995 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.978633 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.978991 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.979123 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.981822 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-t695d"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.982804 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cvqvz"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.983338 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cvqvz" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.983428 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-6l5hd"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.983649 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t695d" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.984180 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-6l5hd" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.987940 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.988223 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.988334 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.988532 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.988604 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-zkmjd"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.992863 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.994461 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.994684 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.995013 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zkmjd" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.995192 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5j8nc"] Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.995443 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.995476 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.995902 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5j8nc" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.996010 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.996093 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.996270 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 22 00:09:33 crc kubenswrapper[4829]: I0122 00:09:33.996401 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:33.997078 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:33.998254 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-nld4t"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:33.999979 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nld4t" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.000455 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.008029 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kzt7t"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.010088 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kzt7t" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.013087 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vdnc9"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.014028 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.014863 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.018808 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vdnc9" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.021177 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-w7dnn"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.024760 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-w7dnn" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.028806 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.028875 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.030424 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-qxrp6"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.031193 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-qxrp6" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.031706 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.031988 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-s5bvx"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.032871 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-s5bvx" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.033758 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gfvpq"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.034408 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gfvpq" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.036631 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-dj87x"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.039161 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484000-n7tmf"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.039655 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484000-n7tmf" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.041277 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2g8pk"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.041836 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2g8pk" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.041947 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/72778ee0-ec95-4ab0-867c-1997b47449f5-encryption-config\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.041994 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/de89451c-cbc7-401d-ab19-f4ea8916fcb5-audit-dir\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.042025 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkmpv\" (UniqueName: \"kubernetes.io/projected/1cf7eed9-6c3d-4721-bc4d-bc58044750e0-kube-api-access-xkmpv\") pod \"router-default-5444994796-z9x4d\" (UID: \"1cf7eed9-6c3d-4721-bc4d-bc58044750e0\") " pod="openshift-ingress/router-default-5444994796-z9x4d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.042048 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6689\" (UniqueName: \"kubernetes.io/projected/dad2b02a-4000-44b8-acad-460b5a394c42-kube-api-access-s6689\") pod \"migrator-59844c95c7-zkmjd\" (UID: \"dad2b02a-4000-44b8-acad-460b5a394c42\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zkmjd" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.042068 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.042093 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc463c00-fc43-4cdc-8ff0-a5026aba9539-config\") pod \"route-controller-manager-6576b87f9c-rknpp\" (UID: \"fc463c00-fc43-4cdc-8ff0-a5026aba9539\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.042111 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/7fa648de-a4e5-4e1d-8bbb-d84d5409f0b2-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-6l5hd\" (UID: \"7fa648de-a4e5-4e1d-8bbb-d84d5409f0b2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-6l5hd" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.042134 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.042158 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7n75\" (UniqueName: \"kubernetes.io/projected/fc463c00-fc43-4cdc-8ff0-a5026aba9539-kube-api-access-j7n75\") pod \"route-controller-manager-6576b87f9c-rknpp\" (UID: \"fc463c00-fc43-4cdc-8ff0-a5026aba9539\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.043388 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/de89451c-cbc7-401d-ab19-f4ea8916fcb5-audit-dir\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.043380 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jqdzd"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.043390 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.043598 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/684b4808-40a6-495b-a8cf-7d48d54982bb-proxy-tls\") pod \"machine-config-controller-84d6567774-t695d\" (UID: \"684b4808-40a6-495b-a8cf-7d48d54982bb\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t695d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.043981 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/72778ee0-ec95-4ab0-867c-1997b47449f5-node-pullsecrets\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.044030 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0e60009d-5985-47f9-b164-32cf604c23fa-client-ca\") pod \"controller-manager-879f6c89f-q9d5h\" (UID: \"0e60009d-5985-47f9-b164-32cf604c23fa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.044084 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2321c1dc-0a76-4b24-aed3-cd67eaf23486-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-2glr6\" (UID: \"2321c1dc-0a76-4b24-aed3-cd67eaf23486\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2glr6" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.044130 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87dhn\" (UniqueName: 
\"kubernetes.io/projected/a6356f84-ed83-468e-b825-808d0aa2c7d4-kube-api-access-87dhn\") pod \"console-f9d7485db-2fdtt\" (UID: \"a6356f84-ed83-468e-b825-808d0aa2c7d4\") " pod="openshift-console/console-f9d7485db-2fdtt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.044153 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjmz5\" (UniqueName: \"kubernetes.io/projected/3127e466-6f03-48c9-8d8b-6de53678192c-kube-api-access-fjmz5\") pod \"machine-config-operator-74547568cd-nld4t\" (UID: \"3127e466-6f03-48c9-8d8b-6de53678192c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nld4t" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.044172 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/81acce8c-d724-494b-bdfd-df88546f6ac6-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-n7mhk\" (UID: \"81acce8c-d724-494b-bdfd-df88546f6ac6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-n7mhk" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.044223 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/72778ee0-ec95-4ab0-867c-1997b47449f5-node-pullsecrets\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.044223 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.044379 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d2b61594-3e3c-4445-9308-436bd10952c0-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-wgx7d\" (UID: \"d2b61594-3e3c-4445-9308-436bd10952c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wgx7d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.044432 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xb4gv\" (UniqueName: \"kubernetes.io/projected/fb1dda78-6284-441f-9239-1e9f81282032-kube-api-access-xb4gv\") pod \"image-pruner-29484000-7gpqp\" (UID: \"fb1dda78-6284-441f-9239-1e9f81282032\") " pod="openshift-image-registry/image-pruner-29484000-7gpqp" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.044468 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1cf7eed9-6c3d-4721-bc4d-bc58044750e0-metrics-certs\") pod \"router-default-5444994796-z9x4d\" (UID: \"1cf7eed9-6c3d-4721-bc4d-bc58044750e0\") " pod="openshift-ingress/router-default-5444994796-z9x4d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.044492 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90f39ea3-632a-4845-b3b9-6ed24a762baa-config\") pod 
\"kube-apiserver-operator-766d6c64bb-vcq2q\" (UID: \"90f39ea3-632a-4845-b3b9-6ed24a762baa\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vcq2q" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.044512 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-nxg4z"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.044751 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pn6t7\" (UniqueName: \"kubernetes.io/projected/81acce8c-d724-494b-bdfd-df88546f6ac6-kube-api-access-pn6t7\") pod \"openshift-controller-manager-operator-756b6f6bc6-n7mhk\" (UID: \"81acce8c-d724-494b-bdfd-df88546f6ac6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-n7mhk" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.044800 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/38a9a23b-efd4-452d-b4ee-8e5205b59cd5-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-7vqjh\" (UID: \"38a9a23b-efd4-452d-b4ee-8e5205b59cd5\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7vqjh" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.044824 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.044848 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc463c00-fc43-4cdc-8ff0-a5026aba9539-config\") pod \"route-controller-manager-6576b87f9c-rknpp\" (UID: \"fc463c00-fc43-4cdc-8ff0-a5026aba9539\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.044980 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3916e963-6bce-4316-b02e-98b5565e8615-config\") pod \"machine-api-operator-5694c8668f-nxg4z\" (UID: \"3916e963-6bce-4316-b02e-98b5565e8615\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nxg4z" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.045014 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3127e466-6f03-48c9-8d8b-6de53678192c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-nld4t\" (UID: \"3127e466-6f03-48c9-8d8b-6de53678192c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nld4t" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.045022 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0e60009d-5985-47f9-b164-32cf604c23fa-client-ca\") pod \"controller-manager-879f6c89f-q9d5h\" (UID: \"0e60009d-5985-47f9-b164-32cf604c23fa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.045100 4829 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/38a9a23b-efd4-452d-b4ee-8e5205b59cd5-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-7vqjh\" (UID: \"38a9a23b-efd4-452d-b4ee-8e5205b59cd5\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7vqjh" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.045147 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81acce8c-d724-494b-bdfd-df88546f6ac6-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-n7mhk\" (UID: \"81acce8c-d724-494b-bdfd-df88546f6ac6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-n7mhk" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.045274 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpnpt\" (UniqueName: \"kubernetes.io/projected/bae421e2-2b7d-465e-baeb-9cdca1f68dc3-kube-api-access-zpnpt\") pod \"packageserver-d55dfcdfc-kzt7t\" (UID: \"bae421e2-2b7d-465e-baeb-9cdca1f68dc3\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kzt7t" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.045310 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a6356f84-ed83-468e-b825-808d0aa2c7d4-console-oauth-config\") pod \"console-f9d7485db-2fdtt\" (UID: \"a6356f84-ed83-468e-b825-808d0aa2c7d4\") " pod="openshift-console/console-f9d7485db-2fdtt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.045336 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a6356f84-ed83-468e-b825-808d0aa2c7d4-oauth-serving-cert\") pod \"console-f9d7485db-2fdtt\" (UID: \"a6356f84-ed83-468e-b825-808d0aa2c7d4\") " pod="openshift-console/console-f9d7485db-2fdtt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.045369 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2e96a180-475f-4cdc-a89e-e0e61dbcbe53-etcd-client\") pod \"apiserver-7bbb656c7d-8dsgz\" (UID: \"2e96a180-475f-4cdc-a89e-e0e61dbcbe53\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.045374 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.045400 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/7f619c30-40fb-46a4-956e-366f2192703e-available-featuregates\") pod \"openshift-config-operator-7777fb866f-c47sd\" (UID: \"7f619c30-40fb-46a4-956e-366f2192703e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-c47sd" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.045515 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" 
(UniqueName: \"kubernetes.io/configmap/fc463c00-fc43-4cdc-8ff0-a5026aba9539-client-ca\") pod \"route-controller-manager-6576b87f9c-rknpp\" (UID: \"fc463c00-fc43-4cdc-8ff0-a5026aba9539\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.045604 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fc463c00-fc43-4cdc-8ff0-a5026aba9539-serving-cert\") pod \"route-controller-manager-6576b87f9c-rknpp\" (UID: \"fc463c00-fc43-4cdc-8ff0-a5026aba9539\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.046063 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/7f619c30-40fb-46a4-956e-366f2192703e-available-featuregates\") pod \"openshift-config-operator-7777fb866f-c47sd\" (UID: \"7f619c30-40fb-46a4-956e-366f2192703e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-c47sd" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.046122 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-cbmj7"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.046687 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fc463c00-fc43-4cdc-8ff0-a5026aba9539-client-ca\") pod \"route-controller-manager-6576b87f9c-rknpp\" (UID: \"fc463c00-fc43-4cdc-8ff0-a5026aba9539\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.046779 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/ffe3f4a6-a78a-4a5d-bbdd-4bda32cf8d41-machine-approver-tls\") pod \"machine-approver-56656f9798-dj69l\" (UID: \"ffe3f4a6-a78a-4a5d-bbdd-4bda32cf8d41\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dj69l" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.046830 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-cbmj7" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.046888 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d2b61594-3e3c-4445-9308-436bd10952c0-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-wgx7d\" (UID: \"d2b61594-3e3c-4445-9308-436bd10952c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wgx7d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.046946 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2594295b-8073-45b0-8006-f7276c580e6d-config\") pod \"console-operator-58897d9998-dj87x\" (UID: \"2594295b-8073-45b0-8006-f7276c580e6d\") " pod="openshift-console-operator/console-operator-58897d9998-dj87x" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.046984 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f5d785d5-3f71-495f-99f0-fd89b646aec9-metrics-tls\") pod \"ingress-operator-5b745b69d9-sg4kz\" (UID: \"f5d785d5-3f71-495f-99f0-fd89b646aec9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sg4kz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.047014 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b-etcd-ca\") pod \"etcd-operator-b45778765-q9s2c\" (UID: \"8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q9s2c" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.047045 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2b61594-3e3c-4445-9308-436bd10952c0-serving-cert\") pod \"authentication-operator-69f744f599-wgx7d\" (UID: \"d2b61594-3e3c-4445-9308-436bd10952c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wgx7d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.047072 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.047100 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d2b61594-3e3c-4445-9308-436bd10952c0-service-ca-bundle\") pod \"authentication-operator-69f744f599-wgx7d\" (UID: \"d2b61594-3e3c-4445-9308-436bd10952c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wgx7d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.047135 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2321c1dc-0a76-4b24-aed3-cd67eaf23486-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-2glr6\" (UID: \"2321c1dc-0a76-4b24-aed3-cd67eaf23486\") " 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2glr6" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.047162 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txw9z\" (UniqueName: \"kubernetes.io/projected/8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b-kube-api-access-txw9z\") pod \"etcd-operator-b45778765-q9s2c\" (UID: \"8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q9s2c" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.047187 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/db0366e2-9560-4f1a-949c-66d4ddc09b89-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-cvqvz\" (UID: \"db0366e2-9560-4f1a-949c-66d4ddc09b89\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cvqvz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.047220 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a6356f84-ed83-468e-b825-808d0aa2c7d4-console-config\") pod \"console-f9d7485db-2fdtt\" (UID: \"a6356f84-ed83-468e-b825-808d0aa2c7d4\") " pod="openshift-console/console-f9d7485db-2fdtt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.049315 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.048015 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2594295b-8073-45b0-8006-f7276c580e6d-config\") pod \"console-operator-58897d9998-dj87x\" (UID: \"2594295b-8073-45b0-8006-f7276c580e6d\") " pod="openshift-console-operator/console-operator-58897d9998-dj87x" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.048343 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-wgx7d"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.049277 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d2b61594-3e3c-4445-9308-436bd10952c0-service-ca-bundle\") pod \"authentication-operator-69f744f599-wgx7d\" (UID: \"d2b61594-3e3c-4445-9308-436bd10952c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wgx7d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.049321 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlq6g\" (UniqueName: \"kubernetes.io/projected/de89451c-cbc7-401d-ab19-f4ea8916fcb5-kube-api-access-hlq6g\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.049663 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/7f619c30-40fb-46a4-956e-366f2192703e-serving-cert\") pod \"openshift-config-operator-7777fb866f-c47sd\" (UID: \"7f619c30-40fb-46a4-956e-366f2192703e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-c47sd" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.049751 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/bd5d245d-600c-4790-89b2-3867c9e6279f-profile-collector-cert\") pod \"catalog-operator-68c6474976-5j8nc\" (UID: \"bd5d245d-600c-4790-89b2-3867c9e6279f\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5j8nc" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.049790 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hj2lg\" (UniqueName: \"kubernetes.io/projected/7fa648de-a4e5-4e1d-8bbb-d84d5409f0b2-kube-api-access-hj2lg\") pod \"multus-admission-controller-857f4d67dd-6l5hd\" (UID: \"7fa648de-a4e5-4e1d-8bbb-d84d5409f0b2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-6l5hd" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.049857 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwph9\" (UniqueName: \"kubernetes.io/projected/38a9a23b-efd4-452d-b4ee-8e5205b59cd5-kube-api-access-hwph9\") pod \"cluster-image-registry-operator-dc59b4c8b-7vqjh\" (UID: \"38a9a23b-efd4-452d-b4ee-8e5205b59cd5\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7vqjh" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.049889 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nf9rw\" (UniqueName: \"kubernetes.io/projected/d2b61594-3e3c-4445-9308-436bd10952c0-kube-api-access-nf9rw\") pod \"authentication-operator-69f744f599-wgx7d\" (UID: \"d2b61594-3e3c-4445-9308-436bd10952c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wgx7d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051196 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2594295b-8073-45b0-8006-f7276c580e6d-trusted-ca\") pod \"console-operator-58897d9998-dj87x\" (UID: \"2594295b-8073-45b0-8006-f7276c580e6d\") " pod="openshift-console-operator/console-operator-58897d9998-dj87x" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051221 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/38a9a23b-efd4-452d-b4ee-8e5205b59cd5-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-7vqjh\" (UID: \"38a9a23b-efd4-452d-b4ee-8e5205b59cd5\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7vqjh" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051243 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b-serving-cert\") pod \"etcd-operator-b45778765-q9s2c\" (UID: \"8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q9s2c" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051266 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/f5d785d5-3f71-495f-99f0-fd89b646aec9-trusted-ca\") pod \"ingress-operator-5b745b69d9-sg4kz\" (UID: \"f5d785d5-3f71-495f-99f0-fd89b646aec9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sg4kz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051286 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4g8h8\" (UniqueName: \"kubernetes.io/projected/2e96a180-475f-4cdc-a89e-e0e61dbcbe53-kube-api-access-4g8h8\") pod \"apiserver-7bbb656c7d-8dsgz\" (UID: \"2e96a180-475f-4cdc-a89e-e0e61dbcbe53\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051306 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6swsg\" (UniqueName: \"kubernetes.io/projected/684b4808-40a6-495b-a8cf-7d48d54982bb-kube-api-access-6swsg\") pod \"machine-config-controller-84d6567774-t695d\" (UID: \"684b4808-40a6-495b-a8cf-7d48d54982bb\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t695d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051323 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/bd5d245d-600c-4790-89b2-3867c9e6279f-srv-cert\") pod \"catalog-operator-68c6474976-5j8nc\" (UID: \"bd5d245d-600c-4790-89b2-3867c9e6279f\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5j8nc" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051344 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/90f39ea3-632a-4845-b3b9-6ed24a762baa-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-vcq2q\" (UID: \"90f39ea3-632a-4845-b3b9-6ed24a762baa\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vcq2q" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051376 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/bae421e2-2b7d-465e-baeb-9cdca1f68dc3-tmpfs\") pod \"packageserver-d55dfcdfc-kzt7t\" (UID: \"bae421e2-2b7d-465e-baeb-9cdca1f68dc3\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kzt7t" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051394 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tz99n\" (UniqueName: \"kubernetes.io/projected/bd5d245d-600c-4790-89b2-3867c9e6279f-kube-api-access-tz99n\") pod \"catalog-operator-68c6474976-5j8nc\" (UID: \"bd5d245d-600c-4790-89b2-3867c9e6279f\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5j8nc" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051415 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-779kf\" (UniqueName: \"kubernetes.io/projected/2594295b-8073-45b0-8006-f7276c580e6d-kube-api-access-779kf\") pod \"console-operator-58897d9998-dj87x\" (UID: \"2594295b-8073-45b0-8006-f7276c580e6d\") " pod="openshift-console-operator/console-operator-58897d9998-dj87x" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051435 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051457 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051475 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1166d09-4d81-47db-803f-316f64bac8a7-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-vh68c\" (UID: \"d1166d09-4d81-47db-803f-316f64bac8a7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vh68c" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051494 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/52038e8d-9971-49b4-86fb-8062c0eea326-metrics-tls\") pod \"dns-operator-744455d44c-v546c\" (UID: \"52038e8d-9971-49b4-86fb-8062c0eea326\") " pod="openshift-dns-operator/dns-operator-744455d44c-v546c" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051514 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/72778ee0-ec95-4ab0-867c-1997b47449f5-audit\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051556 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8b7bb\" (UniqueName: \"kubernetes.io/projected/ffe3f4a6-a78a-4a5d-bbdd-4bda32cf8d41-kube-api-access-8b7bb\") pod \"machine-approver-56656f9798-dj69l\" (UID: \"ffe3f4a6-a78a-4a5d-bbdd-4bda32cf8d41\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dj69l" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051582 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2b61594-3e3c-4445-9308-436bd10952c0-config\") pod \"authentication-operator-69f744f599-wgx7d\" (UID: \"d2b61594-3e3c-4445-9308-436bd10952c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wgx7d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051617 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/2e96a180-475f-4cdc-a89e-e0e61dbcbe53-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-8dsgz\" (UID: \"2e96a180-475f-4cdc-a89e-e0e61dbcbe53\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051636 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2e96a180-475f-4cdc-a89e-e0e61dbcbe53-audit-dir\") pod \"apiserver-7bbb656c7d-8dsgz\" (UID: \"2e96a180-475f-4cdc-a89e-e0e61dbcbe53\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051659 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051676 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2e96a180-475f-4cdc-a89e-e0e61dbcbe53-audit-policies\") pod \"apiserver-7bbb656c7d-8dsgz\" (UID: \"2e96a180-475f-4cdc-a89e-e0e61dbcbe53\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051693 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2e96a180-475f-4cdc-a89e-e0e61dbcbe53-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-8dsgz\" (UID: \"2e96a180-475f-4cdc-a89e-e0e61dbcbe53\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051712 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/2e96a180-475f-4cdc-a89e-e0e61dbcbe53-encryption-config\") pod \"apiserver-7bbb656c7d-8dsgz\" (UID: \"2e96a180-475f-4cdc-a89e-e0e61dbcbe53\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051730 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/1cf7eed9-6c3d-4721-bc4d-bc58044750e0-stats-auth\") pod \"router-default-5444994796-z9x4d\" (UID: \"1cf7eed9-6c3d-4721-bc4d-bc58044750e0\") " pod="openshift-ingress/router-default-5444994796-z9x4d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051747 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffe3f4a6-a78a-4a5d-bbdd-4bda32cf8d41-config\") pod \"machine-approver-56656f9798-dj69l\" (UID: \"ffe3f4a6-a78a-4a5d-bbdd-4bda32cf8d41\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dj69l" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051768 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72778ee0-ec95-4ab0-867c-1997b47449f5-config\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051787 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/684b4808-40a6-495b-a8cf-7d48d54982bb-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-t695d\" (UID: \"684b4808-40a6-495b-a8cf-7d48d54982bb\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t695d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051818 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbmp5\" 
(UniqueName: \"kubernetes.io/projected/7f619c30-40fb-46a4-956e-366f2192703e-kube-api-access-kbmp5\") pod \"openshift-config-operator-7777fb866f-c47sd\" (UID: \"7f619c30-40fb-46a4-956e-366f2192703e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-c47sd" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051834 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b-etcd-service-ca\") pod \"etcd-operator-b45778765-q9s2c\" (UID: \"8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q9s2c" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051882 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051898 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bae421e2-2b7d-465e-baeb-9cdca1f68dc3-apiservice-cert\") pod \"packageserver-d55dfcdfc-kzt7t\" (UID: \"bae421e2-2b7d-465e-baeb-9cdca1f68dc3\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kzt7t" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051913 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f5d785d5-3f71-495f-99f0-fd89b646aec9-bound-sa-token\") pod \"ingress-operator-5b745b69d9-sg4kz\" (UID: \"f5d785d5-3f71-495f-99f0-fd89b646aec9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sg4kz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051951 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfcwq\" (UniqueName: \"kubernetes.io/projected/72778ee0-ec95-4ab0-867c-1997b47449f5-kube-api-access-lfcwq\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051968 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e60009d-5985-47f9-b164-32cf604c23fa-config\") pod \"controller-manager-879f6c89f-q9d5h\" (UID: \"0e60009d-5985-47f9-b164-32cf604c23fa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051985 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/2ef479a2-183c-40f8-8bd7-e974de5a5305-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-jqdzd\" (UID: \"2ef479a2-183c-40f8-8bd7-e974de5a5305\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jqdzd" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052002 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5hfp\" (UniqueName: 
\"kubernetes.io/projected/52038e8d-9971-49b4-86fb-8062c0eea326-kube-api-access-g5hfp\") pod \"dns-operator-744455d44c-v546c\" (UID: \"52038e8d-9971-49b4-86fb-8062c0eea326\") " pod="openshift-dns-operator/dns-operator-744455d44c-v546c" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052023 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/72778ee0-ec95-4ab0-867c-1997b47449f5-etcd-client\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052039 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njjvv\" (UniqueName: \"kubernetes.io/projected/0e60009d-5985-47f9-b164-32cf604c23fa-kube-api-access-njjvv\") pod \"controller-manager-879f6c89f-q9d5h\" (UID: \"0e60009d-5985-47f9-b164-32cf604c23fa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052078 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/1cf7eed9-6c3d-4721-bc4d-bc58044750e0-default-certificate\") pod \"router-default-5444994796-z9x4d\" (UID: \"1cf7eed9-6c3d-4721-bc4d-bc58044750e0\") " pod="openshift-ingress/router-default-5444994796-z9x4d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052097 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b-etcd-client\") pod \"etcd-operator-b45778765-q9s2c\" (UID: \"8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q9s2c" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052119 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1166d09-4d81-47db-803f-316f64bac8a7-config\") pod \"openshift-apiserver-operator-796bbdcf4f-vh68c\" (UID: \"d1166d09-4d81-47db-803f-316f64bac8a7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vh68c" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052136 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a6356f84-ed83-468e-b825-808d0aa2c7d4-service-ca\") pod \"console-f9d7485db-2fdtt\" (UID: \"a6356f84-ed83-468e-b825-808d0aa2c7d4\") " pod="openshift-console/console-f9d7485db-2fdtt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052150 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b-config\") pod \"etcd-operator-b45778765-q9s2c\" (UID: \"8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q9s2c" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052166 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/3127e466-6f03-48c9-8d8b-6de53678192c-images\") pod \"machine-config-operator-74547568cd-nld4t\" (UID: \"3127e466-6f03-48c9-8d8b-6de53678192c\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nld4t" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052186 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052203 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/72778ee0-ec95-4ab0-867c-1997b47449f5-image-import-ca\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052224 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/fb1dda78-6284-441f-9239-1e9f81282032-serviceca\") pod \"image-pruner-29484000-7gpqp\" (UID: \"fb1dda78-6284-441f-9239-1e9f81282032\") " pod="openshift-image-registry/image-pruner-29484000-7gpqp" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052226 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2e96a180-475f-4cdc-a89e-e0e61dbcbe53-audit-dir\") pod \"apiserver-7bbb656c7d-8dsgz\" (UID: \"2e96a180-475f-4cdc-a89e-e0e61dbcbe53\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052239 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a6356f84-ed83-468e-b825-808d0aa2c7d4-trusted-ca-bundle\") pod \"console-f9d7485db-2fdtt\" (UID: \"a6356f84-ed83-468e-b825-808d0aa2c7d4\") " pod="openshift-console/console-f9d7485db-2fdtt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052304 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a6356f84-ed83-468e-b825-808d0aa2c7d4-console-serving-cert\") pod \"console-f9d7485db-2fdtt\" (UID: \"a6356f84-ed83-468e-b825-808d0aa2c7d4\") " pod="openshift-console/console-f9d7485db-2fdtt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052334 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/72778ee0-ec95-4ab0-867c-1997b47449f5-audit-dir\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052366 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2321c1dc-0a76-4b24-aed3-cd67eaf23486-config\") pod \"kube-controller-manager-operator-78b949d7b-2glr6\" (UID: \"2321c1dc-0a76-4b24-aed3-cd67eaf23486\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2glr6" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052395 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-zhq4m\" (UniqueName: \"kubernetes.io/projected/f5d785d5-3f71-495f-99f0-fd89b646aec9-kube-api-access-zhq4m\") pod \"ingress-operator-5b745b69d9-sg4kz\" (UID: \"f5d785d5-3f71-495f-99f0-fd89b646aec9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sg4kz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052420 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2594295b-8073-45b0-8006-f7276c580e6d-serving-cert\") pod \"console-operator-58897d9998-dj87x\" (UID: \"2594295b-8073-45b0-8006-f7276c580e6d\") " pod="openshift-console-operator/console-operator-58897d9998-dj87x" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052443 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2e96a180-475f-4cdc-a89e-e0e61dbcbe53-serving-cert\") pod \"apiserver-7bbb656c7d-8dsgz\" (UID: \"2e96a180-475f-4cdc-a89e-e0e61dbcbe53\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052468 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mt8r8\" (UniqueName: \"kubernetes.io/projected/d1166d09-4d81-47db-803f-316f64bac8a7-kube-api-access-mt8r8\") pod \"openshift-apiserver-operator-796bbdcf4f-vh68c\" (UID: \"d1166d09-4d81-47db-803f-316f64bac8a7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vh68c" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052490 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1cf7eed9-6c3d-4721-bc4d-bc58044750e0-service-ca-bundle\") pod \"router-default-5444994796-z9x4d\" (UID: \"1cf7eed9-6c3d-4721-bc4d-bc58044750e0\") " pod="openshift-ingress/router-default-5444994796-z9x4d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052513 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bae421e2-2b7d-465e-baeb-9cdca1f68dc3-webhook-cert\") pod \"packageserver-d55dfcdfc-kzt7t\" (UID: \"bae421e2-2b7d-465e-baeb-9cdca1f68dc3\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kzt7t" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052534 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/3916e963-6bce-4316-b02e-98b5565e8615-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-nxg4z\" (UID: \"3916e963-6bce-4316-b02e-98b5565e8615\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nxg4z" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052575 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/90f39ea3-632a-4845-b3b9-6ed24a762baa-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-vcq2q\" (UID: \"90f39ea3-632a-4845-b3b9-6ed24a762baa\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vcq2q" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052598 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052625 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvb9f\" (UniqueName: \"kubernetes.io/projected/9447fd29-4eae-4299-b266-1f5236931aee-kube-api-access-bvb9f\") pod \"downloads-7954f5f757-8tq29\" (UID: \"9447fd29-4eae-4299-b266-1f5236931aee\") " pod="openshift-console/downloads-7954f5f757-8tq29" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052647 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0e60009d-5985-47f9-b164-32cf604c23fa-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-q9d5h\" (UID: \"0e60009d-5985-47f9-b164-32cf604c23fa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052669 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8skhl\" (UniqueName: \"kubernetes.io/projected/db0366e2-9560-4f1a-949c-66d4ddc09b89-kube-api-access-8skhl\") pod \"control-plane-machine-set-operator-78cbb6b69f-cvqvz\" (UID: \"db0366e2-9560-4f1a-949c-66d4ddc09b89\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cvqvz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052695 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/72778ee0-ec95-4ab0-867c-1997b47449f5-serving-cert\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052717 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/72778ee0-ec95-4ab0-867c-1997b47449f5-etcd-serving-ca\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052738 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7r99\" (UniqueName: \"kubernetes.io/projected/2ef479a2-183c-40f8-8bd7-e974de5a5305-kube-api-access-m7r99\") pod \"cluster-samples-operator-665b6dd947-jqdzd\" (UID: \"2ef479a2-183c-40f8-8bd7-e974de5a5305\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jqdzd" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052763 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3127e466-6f03-48c9-8d8b-6de53678192c-proxy-tls\") pod \"machine-config-operator-74547568cd-nld4t\" (UID: \"3127e466-6f03-48c9-8d8b-6de53678192c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nld4t" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052791 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4vfg\" (UniqueName: \"kubernetes.io/projected/3916e963-6bce-4316-b02e-98b5565e8615-kube-api-access-w4vfg\") pod 
\"machine-api-operator-5694c8668f-nxg4z\" (UID: \"3916e963-6bce-4316-b02e-98b5565e8615\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nxg4z" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052815 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/72778ee0-ec95-4ab0-867c-1997b47449f5-trusted-ca-bundle\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052843 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ffe3f4a6-a78a-4a5d-bbdd-4bda32cf8d41-auth-proxy-config\") pod \"machine-approver-56656f9798-dj69l\" (UID: \"ffe3f4a6-a78a-4a5d-bbdd-4bda32cf8d41\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dj69l" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052880 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e60009d-5985-47f9-b164-32cf604c23fa-serving-cert\") pod \"controller-manager-879f6c89f-q9d5h\" (UID: \"0e60009d-5985-47f9-b164-32cf604c23fa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052902 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/de89451c-cbc7-401d-ab19-f4ea8916fcb5-audit-policies\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052923 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/3916e963-6bce-4316-b02e-98b5565e8615-images\") pod \"machine-api-operator-5694c8668f-nxg4z\" (UID: \"3916e963-6bce-4316-b02e-98b5565e8615\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nxg4z" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.049922 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/ffe3f4a6-a78a-4a5d-bbdd-4bda32cf8d41-machine-approver-tls\") pod \"machine-approver-56656f9798-dj69l\" (UID: \"ffe3f4a6-a78a-4a5d-bbdd-4bda32cf8d41\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dj69l" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.053041 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2e96a180-475f-4cdc-a89e-e0e61dbcbe53-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-8dsgz\" (UID: \"2e96a180-475f-4cdc-a89e-e0e61dbcbe53\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.053504 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.049519 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.053715 4829 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7vqjh"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.047723 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/72778ee0-ec95-4ab0-867c-1997b47449f5-encryption-config\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.050490 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/2e96a180-475f-4cdc-a89e-e0e61dbcbe53-etcd-client\") pod \"apiserver-7bbb656c7d-8dsgz\" (UID: \"2e96a180-475f-4cdc-a89e-e0e61dbcbe53\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.050919 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/38a9a23b-efd4-452d-b4ee-8e5205b59cd5-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-7vqjh\" (UID: \"38a9a23b-efd4-452d-b4ee-8e5205b59cd5\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7vqjh" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.051149 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.054058 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2594295b-8073-45b0-8006-f7276c580e6d-trusted-ca\") pod \"console-operator-58897d9998-dj87x\" (UID: \"2594295b-8073-45b0-8006-f7276c580e6d\") " pod="openshift-console-operator/console-operator-58897d9998-dj87x" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.054131 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/72778ee0-ec95-4ab0-867c-1997b47449f5-audit-dir\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.055076 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fc463c00-fc43-4cdc-8ff0-a5026aba9539-serving-cert\") pod \"route-controller-manager-6576b87f9c-rknpp\" (UID: \"fc463c00-fc43-4cdc-8ff0-a5026aba9539\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.056103 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/2e96a180-475f-4cdc-a89e-e0e61dbcbe53-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-8dsgz\" (UID: \"2e96a180-475f-4cdc-a89e-e0e61dbcbe53\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.056222 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/0e60009d-5985-47f9-b164-32cf604c23fa-config\") pod \"controller-manager-879f6c89f-q9d5h\" (UID: \"0e60009d-5985-47f9-b164-32cf604c23fa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.056859 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1166d09-4d81-47db-803f-316f64bac8a7-config\") pod \"openshift-apiserver-operator-796bbdcf4f-vh68c\" (UID: \"d1166d09-4d81-47db-803f-316f64bac8a7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vh68c" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.057515 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/fb1dda78-6284-441f-9239-1e9f81282032-serviceca\") pod \"image-pruner-29484000-7gpqp\" (UID: \"fb1dda78-6284-441f-9239-1e9f81282032\") " pod="openshift-image-registry/image-pruner-29484000-7gpqp" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.057771 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/72778ee0-ec95-4ab0-867c-1997b47449f5-image-import-ca\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.058054 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2b61594-3e3c-4445-9308-436bd10952c0-config\") pod \"authentication-operator-69f744f599-wgx7d\" (UID: \"d2b61594-3e3c-4445-9308-436bd10952c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wgx7d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.052740 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2e96a180-475f-4cdc-a89e-e0e61dbcbe53-audit-policies\") pod \"apiserver-7bbb656c7d-8dsgz\" (UID: \"2e96a180-475f-4cdc-a89e-e0e61dbcbe53\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.062773 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1166d09-4d81-47db-803f-316f64bac8a7-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-vh68c\" (UID: \"d1166d09-4d81-47db-803f-316f64bac8a7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vh68c" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.066571 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.071807 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/72778ee0-ec95-4ab0-867c-1997b47449f5-trusted-ca-bundle\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 
00:09:34.075299 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/2e96a180-475f-4cdc-a89e-e0e61dbcbe53-encryption-config\") pod \"apiserver-7bbb656c7d-8dsgz\" (UID: \"2e96a180-475f-4cdc-a89e-e0e61dbcbe53\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.075418 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-v546c"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.075910 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2b61594-3e3c-4445-9308-436bd10952c0-serving-cert\") pod \"authentication-operator-69f744f599-wgx7d\" (UID: \"d2b61594-3e3c-4445-9308-436bd10952c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wgx7d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.078149 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72778ee0-ec95-4ab0-867c-1997b47449f5-config\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.078731 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0e60009d-5985-47f9-b164-32cf604c23fa-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-q9d5h\" (UID: \"0e60009d-5985-47f9-b164-32cf604c23fa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.079123 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/72778ee0-ec95-4ab0-867c-1997b47449f5-etcd-serving-ca\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.080789 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/de89451c-cbc7-401d-ab19-f4ea8916fcb5-audit-policies\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.082056 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffe3f4a6-a78a-4a5d-bbdd-4bda32cf8d41-config\") pod \"machine-approver-56656f9798-dj69l\" (UID: \"ffe3f4a6-a78a-4a5d-bbdd-4bda32cf8d41\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dj69l" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.082456 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.082460 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.083233 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.083478 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/72778ee0-ec95-4ab0-867c-1997b47449f5-audit\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.085876 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f619c30-40fb-46a4-956e-366f2192703e-serving-cert\") pod \"openshift-config-operator-7777fb866f-c47sd\" (UID: \"7f619c30-40fb-46a4-956e-366f2192703e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-c47sd" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.086321 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.087495 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/38a9a23b-efd4-452d-b4ee-8e5205b59cd5-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-7vqjh\" (UID: \"38a9a23b-efd4-452d-b4ee-8e5205b59cd5\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7vqjh" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.088999 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.089087 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-c47sd"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.089592 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ffe3f4a6-a78a-4a5d-bbdd-4bda32cf8d41-auth-proxy-config\") pod \"machine-approver-56656f9798-dj69l\" (UID: \"ffe3f4a6-a78a-4a5d-bbdd-4bda32cf8d41\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dj69l" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.094226 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.101160 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e60009d-5985-47f9-b164-32cf604c23fa-serving-cert\") pod 
\"controller-manager-879f6c89f-q9d5h\" (UID: \"0e60009d-5985-47f9-b164-32cf604c23fa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.101495 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2e96a180-475f-4cdc-a89e-e0e61dbcbe53-serving-cert\") pod \"apiserver-7bbb656c7d-8dsgz\" (UID: \"2e96a180-475f-4cdc-a89e-e0e61dbcbe53\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.101824 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.102150 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/72778ee0-ec95-4ab0-867c-1997b47449f5-serving-cert\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.103580 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2594295b-8073-45b0-8006-f7276c580e6d-serving-cert\") pod \"console-operator-58897d9998-dj87x\" (UID: \"2594295b-8073-45b0-8006-f7276c580e6d\") " pod="openshift-console-operator/console-operator-58897d9998-dj87x" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.103716 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/2ef479a2-183c-40f8-8bd7-e974de5a5305-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-jqdzd\" (UID: \"2ef479a2-183c-40f8-8bd7-e974de5a5305\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jqdzd" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.104282 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-q9d5h"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.105396 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-8tq29"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.106429 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cvqvz"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.112564 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.117925 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-t695d"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.117984 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vcq2q"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.117993 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-gwk42"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.127675 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-6ltv9"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.128526 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.132970 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.137267 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/72778ee0-ec95-4ab0-867c-1997b47449f5-etcd-client\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.138444 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.139697 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-qxrp6"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.140731 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vh68c"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.141773 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-nld4t"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.143065 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-6l5hd"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.149074 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5j8nc"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.153444 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.153998 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/3916e963-6bce-4316-b02e-98b5565e8615-images\") pod \"machine-api-operator-5694c8668f-nxg4z\" (UID: \"3916e963-6bce-4316-b02e-98b5565e8615\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nxg4z" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.154061 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkmpv\" (UniqueName: \"kubernetes.io/projected/1cf7eed9-6c3d-4721-bc4d-bc58044750e0-kube-api-access-xkmpv\") pod \"router-default-5444994796-z9x4d\" (UID: \"1cf7eed9-6c3d-4721-bc4d-bc58044750e0\") " pod="openshift-ingress/router-default-5444994796-z9x4d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.154095 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6689\" (UniqueName: 
\"kubernetes.io/projected/dad2b02a-4000-44b8-acad-460b5a394c42-kube-api-access-s6689\") pod \"migrator-59844c95c7-zkmjd\" (UID: \"dad2b02a-4000-44b8-acad-460b5a394c42\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zkmjd" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.154180 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/7fa648de-a4e5-4e1d-8bbb-d84d5409f0b2-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-6l5hd\" (UID: \"7fa648de-a4e5-4e1d-8bbb-d84d5409f0b2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-6l5hd" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.154227 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/684b4808-40a6-495b-a8cf-7d48d54982bb-proxy-tls\") pod \"machine-config-controller-84d6567774-t695d\" (UID: \"684b4808-40a6-495b-a8cf-7d48d54982bb\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t695d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.154268 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87dhn\" (UniqueName: \"kubernetes.io/projected/a6356f84-ed83-468e-b825-808d0aa2c7d4-kube-api-access-87dhn\") pod \"console-f9d7485db-2fdtt\" (UID: \"a6356f84-ed83-468e-b825-808d0aa2c7d4\") " pod="openshift-console/console-f9d7485db-2fdtt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.154301 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjmz5\" (UniqueName: \"kubernetes.io/projected/3127e466-6f03-48c9-8d8b-6de53678192c-kube-api-access-fjmz5\") pod \"machine-config-operator-74547568cd-nld4t\" (UID: \"3127e466-6f03-48c9-8d8b-6de53678192c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nld4t" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.154335 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/81acce8c-d724-494b-bdfd-df88546f6ac6-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-n7mhk\" (UID: \"81acce8c-d724-494b-bdfd-df88546f6ac6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-n7mhk" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.154386 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2321c1dc-0a76-4b24-aed3-cd67eaf23486-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-2glr6\" (UID: \"2321c1dc-0a76-4b24-aed3-cd67eaf23486\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2glr6" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.154433 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1cf7eed9-6c3d-4721-bc4d-bc58044750e0-metrics-certs\") pod \"router-default-5444994796-z9x4d\" (UID: \"1cf7eed9-6c3d-4721-bc4d-bc58044750e0\") " pod="openshift-ingress/router-default-5444994796-z9x4d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.154472 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90f39ea3-632a-4845-b3b9-6ed24a762baa-config\") pod 
\"kube-apiserver-operator-766d6c64bb-vcq2q\" (UID: \"90f39ea3-632a-4845-b3b9-6ed24a762baa\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vcq2q" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.154515 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pn6t7\" (UniqueName: \"kubernetes.io/projected/81acce8c-d724-494b-bdfd-df88546f6ac6-kube-api-access-pn6t7\") pod \"openshift-controller-manager-operator-756b6f6bc6-n7mhk\" (UID: \"81acce8c-d724-494b-bdfd-df88546f6ac6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-n7mhk" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.154591 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3916e963-6bce-4316-b02e-98b5565e8615-config\") pod \"machine-api-operator-5694c8668f-nxg4z\" (UID: \"3916e963-6bce-4316-b02e-98b5565e8615\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nxg4z" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.154623 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3127e466-6f03-48c9-8d8b-6de53678192c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-nld4t\" (UID: \"3127e466-6f03-48c9-8d8b-6de53678192c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nld4t" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.154658 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpnpt\" (UniqueName: \"kubernetes.io/projected/bae421e2-2b7d-465e-baeb-9cdca1f68dc3-kube-api-access-zpnpt\") pod \"packageserver-d55dfcdfc-kzt7t\" (UID: \"bae421e2-2b7d-465e-baeb-9cdca1f68dc3\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kzt7t" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.154700 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81acce8c-d724-494b-bdfd-df88546f6ac6-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-n7mhk\" (UID: \"81acce8c-d724-494b-bdfd-df88546f6ac6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-n7mhk" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.154733 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a6356f84-ed83-468e-b825-808d0aa2c7d4-console-oauth-config\") pod \"console-f9d7485db-2fdtt\" (UID: \"a6356f84-ed83-468e-b825-808d0aa2c7d4\") " pod="openshift-console/console-f9d7485db-2fdtt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.154775 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a6356f84-ed83-468e-b825-808d0aa2c7d4-oauth-serving-cert\") pod \"console-f9d7485db-2fdtt\" (UID: \"a6356f84-ed83-468e-b825-808d0aa2c7d4\") " pod="openshift-console/console-f9d7485db-2fdtt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.154844 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f5d785d5-3f71-495f-99f0-fd89b646aec9-metrics-tls\") pod \"ingress-operator-5b745b69d9-sg4kz\" (UID: \"f5d785d5-3f71-495f-99f0-fd89b646aec9\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sg4kz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.154875 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b-etcd-ca\") pod \"etcd-operator-b45778765-q9s2c\" (UID: \"8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q9s2c" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.154926 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2321c1dc-0a76-4b24-aed3-cd67eaf23486-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-2glr6\" (UID: \"2321c1dc-0a76-4b24-aed3-cd67eaf23486\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2glr6" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.154957 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txw9z\" (UniqueName: \"kubernetes.io/projected/8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b-kube-api-access-txw9z\") pod \"etcd-operator-b45778765-q9s2c\" (UID: \"8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q9s2c" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.155000 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/db0366e2-9560-4f1a-949c-66d4ddc09b89-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-cvqvz\" (UID: \"db0366e2-9560-4f1a-949c-66d4ddc09b89\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cvqvz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.155023 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/3916e963-6bce-4316-b02e-98b5565e8615-images\") pod \"machine-api-operator-5694c8668f-nxg4z\" (UID: \"3916e963-6bce-4316-b02e-98b5565e8615\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nxg4z" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.155058 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a6356f84-ed83-468e-b825-808d0aa2c7d4-console-config\") pod \"console-f9d7485db-2fdtt\" (UID: \"a6356f84-ed83-468e-b825-808d0aa2c7d4\") " pod="openshift-console/console-f9d7485db-2fdtt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.155594 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hj2lg\" (UniqueName: \"kubernetes.io/projected/7fa648de-a4e5-4e1d-8bbb-d84d5409f0b2-kube-api-access-hj2lg\") pod \"multus-admission-controller-857f4d67dd-6l5hd\" (UID: \"7fa648de-a4e5-4e1d-8bbb-d84d5409f0b2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-6l5hd" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.155741 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/bd5d245d-600c-4790-89b2-3867c9e6279f-profile-collector-cert\") pod \"catalog-operator-68c6474976-5j8nc\" (UID: \"bd5d245d-600c-4790-89b2-3867c9e6279f\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5j8nc" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.155816 4829 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f5d785d5-3f71-495f-99f0-fd89b646aec9-trusted-ca\") pod \"ingress-operator-5b745b69d9-sg4kz\" (UID: \"f5d785d5-3f71-495f-99f0-fd89b646aec9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sg4kz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.155840 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b-serving-cert\") pod \"etcd-operator-b45778765-q9s2c\" (UID: \"8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q9s2c" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.155864 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/bd5d245d-600c-4790-89b2-3867c9e6279f-srv-cert\") pod \"catalog-operator-68c6474976-5j8nc\" (UID: \"bd5d245d-600c-4790-89b2-3867c9e6279f\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5j8nc" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.155881 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/90f39ea3-632a-4845-b3b9-6ed24a762baa-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-vcq2q\" (UID: \"90f39ea3-632a-4845-b3b9-6ed24a762baa\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vcq2q" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.155979 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6swsg\" (UniqueName: \"kubernetes.io/projected/684b4808-40a6-495b-a8cf-7d48d54982bb-kube-api-access-6swsg\") pod \"machine-config-controller-84d6567774-t695d\" (UID: \"684b4808-40a6-495b-a8cf-7d48d54982bb\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t695d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.156028 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/bae421e2-2b7d-465e-baeb-9cdca1f68dc3-tmpfs\") pod \"packageserver-d55dfcdfc-kzt7t\" (UID: \"bae421e2-2b7d-465e-baeb-9cdca1f68dc3\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kzt7t" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.156329 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tz99n\" (UniqueName: \"kubernetes.io/projected/bd5d245d-600c-4790-89b2-3867c9e6279f-kube-api-access-tz99n\") pod \"catalog-operator-68c6474976-5j8nc\" (UID: \"bd5d245d-600c-4790-89b2-3867c9e6279f\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5j8nc" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.156405 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/52038e8d-9971-49b4-86fb-8062c0eea326-metrics-tls\") pod \"dns-operator-744455d44c-v546c\" (UID: \"52038e8d-9971-49b4-86fb-8062c0eea326\") " pod="openshift-dns-operator/dns-operator-744455d44c-v546c" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.156711 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/1cf7eed9-6c3d-4721-bc4d-bc58044750e0-stats-auth\") pod 
\"router-default-5444994796-z9x4d\" (UID: \"1cf7eed9-6c3d-4721-bc4d-bc58044750e0\") " pod="openshift-ingress/router-default-5444994796-z9x4d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.156784 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/684b4808-40a6-495b-a8cf-7d48d54982bb-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-t695d\" (UID: \"684b4808-40a6-495b-a8cf-7d48d54982bb\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t695d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.156838 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b-etcd-service-ca\") pod \"etcd-operator-b45778765-q9s2c\" (UID: \"8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q9s2c" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.156933 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bae421e2-2b7d-465e-baeb-9cdca1f68dc3-apiservice-cert\") pod \"packageserver-d55dfcdfc-kzt7t\" (UID: \"bae421e2-2b7d-465e-baeb-9cdca1f68dc3\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kzt7t" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.156993 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f5d785d5-3f71-495f-99f0-fd89b646aec9-bound-sa-token\") pod \"ingress-operator-5b745b69d9-sg4kz\" (UID: \"f5d785d5-3f71-495f-99f0-fd89b646aec9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sg4kz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.157006 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/bae421e2-2b7d-465e-baeb-9cdca1f68dc3-tmpfs\") pod \"packageserver-d55dfcdfc-kzt7t\" (UID: \"bae421e2-2b7d-465e-baeb-9cdca1f68dc3\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kzt7t" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.157533 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5hfp\" (UniqueName: \"kubernetes.io/projected/52038e8d-9971-49b4-86fb-8062c0eea326-kube-api-access-g5hfp\") pod \"dns-operator-744455d44c-v546c\" (UID: \"52038e8d-9971-49b4-86fb-8062c0eea326\") " pod="openshift-dns-operator/dns-operator-744455d44c-v546c" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.157630 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/1cf7eed9-6c3d-4721-bc4d-bc58044750e0-default-certificate\") pod \"router-default-5444994796-z9x4d\" (UID: \"1cf7eed9-6c3d-4721-bc4d-bc58044750e0\") " pod="openshift-ingress/router-default-5444994796-z9x4d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.157670 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b-etcd-client\") pod \"etcd-operator-b45778765-q9s2c\" (UID: \"8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q9s2c" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.157719 4829 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b-config\") pod \"etcd-operator-b45778765-q9s2c\" (UID: \"8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q9s2c" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.157751 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/3127e466-6f03-48c9-8d8b-6de53678192c-images\") pod \"machine-config-operator-74547568cd-nld4t\" (UID: \"3127e466-6f03-48c9-8d8b-6de53678192c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nld4t" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.157779 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a6356f84-ed83-468e-b825-808d0aa2c7d4-service-ca\") pod \"console-f9d7485db-2fdtt\" (UID: \"a6356f84-ed83-468e-b825-808d0aa2c7d4\") " pod="openshift-console/console-f9d7485db-2fdtt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.157816 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a6356f84-ed83-468e-b825-808d0aa2c7d4-trusted-ca-bundle\") pod \"console-f9d7485db-2fdtt\" (UID: \"a6356f84-ed83-468e-b825-808d0aa2c7d4\") " pod="openshift-console/console-f9d7485db-2fdtt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.158023 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/684b4808-40a6-495b-a8cf-7d48d54982bb-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-t695d\" (UID: \"684b4808-40a6-495b-a8cf-7d48d54982bb\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t695d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.158240 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a6356f84-ed83-468e-b825-808d0aa2c7d4-console-serving-cert\") pod \"console-f9d7485db-2fdtt\" (UID: \"a6356f84-ed83-468e-b825-808d0aa2c7d4\") " pod="openshift-console/console-f9d7485db-2fdtt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.158457 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2321c1dc-0a76-4b24-aed3-cd67eaf23486-config\") pod \"kube-controller-manager-operator-78b949d7b-2glr6\" (UID: \"2321c1dc-0a76-4b24-aed3-cd67eaf23486\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2glr6" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.158580 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3127e466-6f03-48c9-8d8b-6de53678192c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-nld4t\" (UID: \"3127e466-6f03-48c9-8d8b-6de53678192c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nld4t" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.158590 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhq4m\" (UniqueName: \"kubernetes.io/projected/f5d785d5-3f71-495f-99f0-fd89b646aec9-kube-api-access-zhq4m\") pod \"ingress-operator-5b745b69d9-sg4kz\" (UID: 
\"f5d785d5-3f71-495f-99f0-fd89b646aec9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sg4kz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.158661 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1cf7eed9-6c3d-4721-bc4d-bc58044750e0-service-ca-bundle\") pod \"router-default-5444994796-z9x4d\" (UID: \"1cf7eed9-6c3d-4721-bc4d-bc58044750e0\") " pod="openshift-ingress/router-default-5444994796-z9x4d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.158861 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bae421e2-2b7d-465e-baeb-9cdca1f68dc3-webhook-cert\") pod \"packageserver-d55dfcdfc-kzt7t\" (UID: \"bae421e2-2b7d-465e-baeb-9cdca1f68dc3\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kzt7t" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.159352 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/3916e963-6bce-4316-b02e-98b5565e8615-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-nxg4z\" (UID: \"3916e963-6bce-4316-b02e-98b5565e8615\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nxg4z" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.159433 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/81acce8c-d724-494b-bdfd-df88546f6ac6-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-n7mhk\" (UID: \"81acce8c-d724-494b-bdfd-df88546f6ac6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-n7mhk" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.159472 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/90f39ea3-632a-4845-b3b9-6ed24a762baa-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-vcq2q\" (UID: \"90f39ea3-632a-4845-b3b9-6ed24a762baa\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vcq2q" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.159743 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-zkmjd"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.159961 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8skhl\" (UniqueName: \"kubernetes.io/projected/db0366e2-9560-4f1a-949c-66d4ddc09b89-kube-api-access-8skhl\") pod \"control-plane-machine-set-operator-78cbb6b69f-cvqvz\" (UID: \"db0366e2-9560-4f1a-949c-66d4ddc09b89\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cvqvz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.160027 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3127e466-6f03-48c9-8d8b-6de53678192c-proxy-tls\") pod \"machine-config-operator-74547568cd-nld4t\" (UID: \"3127e466-6f03-48c9-8d8b-6de53678192c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nld4t" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.160466 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/1cf7eed9-6c3d-4721-bc4d-bc58044750e0-service-ca-bundle\") pod \"router-default-5444994796-z9x4d\" (UID: \"1cf7eed9-6c3d-4721-bc4d-bc58044750e0\") " pod="openshift-ingress/router-default-5444994796-z9x4d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.160483 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4vfg\" (UniqueName: \"kubernetes.io/projected/3916e963-6bce-4316-b02e-98b5565e8615-kube-api-access-w4vfg\") pod \"machine-api-operator-5694c8668f-nxg4z\" (UID: \"3916e963-6bce-4316-b02e-98b5565e8615\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nxg4z" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.162491 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f5d785d5-3f71-495f-99f0-fd89b646aec9-trusted-ca\") pod \"ingress-operator-5b745b69d9-sg4kz\" (UID: \"f5d785d5-3f71-495f-99f0-fd89b646aec9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sg4kz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.162750 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/81acce8c-d724-494b-bdfd-df88546f6ac6-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-n7mhk\" (UID: \"81acce8c-d724-494b-bdfd-df88546f6ac6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-n7mhk" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.162977 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/3916e963-6bce-4316-b02e-98b5565e8615-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-nxg4z\" (UID: \"3916e963-6bce-4316-b02e-98b5565e8615\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nxg4z" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.163250 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a6356f84-ed83-468e-b825-808d0aa2c7d4-oauth-serving-cert\") pod \"console-f9d7485db-2fdtt\" (UID: \"a6356f84-ed83-468e-b825-808d0aa2c7d4\") " pod="openshift-console/console-f9d7485db-2fdtt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.163422 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a6356f84-ed83-468e-b825-808d0aa2c7d4-service-ca\") pod \"console-f9d7485db-2fdtt\" (UID: \"a6356f84-ed83-468e-b825-808d0aa2c7d4\") " pod="openshift-console/console-f9d7485db-2fdtt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.163523 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a6356f84-ed83-468e-b825-808d0aa2c7d4-trusted-ca-bundle\") pod \"console-f9d7485db-2fdtt\" (UID: \"a6356f84-ed83-468e-b825-808d0aa2c7d4\") " pod="openshift-console/console-f9d7485db-2fdtt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.164239 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f5d785d5-3f71-495f-99f0-fd89b646aec9-metrics-tls\") pod \"ingress-operator-5b745b69d9-sg4kz\" (UID: \"f5d785d5-3f71-495f-99f0-fd89b646aec9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sg4kz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.165730 4829 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/52038e8d-9971-49b4-86fb-8062c0eea326-metrics-tls\") pod \"dns-operator-744455d44c-v546c\" (UID: \"52038e8d-9971-49b4-86fb-8062c0eea326\") " pod="openshift-dns-operator/dns-operator-744455d44c-v546c" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.166555 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3916e963-6bce-4316-b02e-98b5565e8615-config\") pod \"machine-api-operator-5694c8668f-nxg4z\" (UID: \"3916e963-6bce-4316-b02e-98b5565e8615\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nxg4z" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.167722 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-pruner-29484000-7gpqp"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.169192 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-q9s2c"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.170623 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-sg4kz"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.171825 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6bgqw"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.173178 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.175024 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-2fdtt"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.176732 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2glr6"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.177947 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-n7mhk"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.179173 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484000-n7tmf"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.180371 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-26xkj"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.182850 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-s5bvx"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.184580 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vdnc9"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.186447 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-w7dnn"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.187248 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2g8pk"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.188526 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gfvpq"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.189768 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kzt7t"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.191162 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-9pmcq"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.192356 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-9pmcq" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.192656 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.192833 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-ht6jq"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.194115 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-ht6jq" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.194616 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-ht6jq"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.196589 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-9pmcq"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.197799 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-q2gtb"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.198769 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-q2gtb" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.199214 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-q2gtb"] Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.213103 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.221399 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/1cf7eed9-6c3d-4721-bc4d-bc58044750e0-default-certificate\") pod \"router-default-5444994796-z9x4d\" (UID: \"1cf7eed9-6c3d-4721-bc4d-bc58044750e0\") " pod="openshift-ingress/router-default-5444994796-z9x4d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.232051 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.240842 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/1cf7eed9-6c3d-4721-bc4d-bc58044750e0-stats-auth\") pod \"router-default-5444994796-z9x4d\" (UID: \"1cf7eed9-6c3d-4721-bc4d-bc58044750e0\") " pod="openshift-ingress/router-default-5444994796-z9x4d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.252520 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.258467 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1cf7eed9-6c3d-4721-bc4d-bc58044750e0-metrics-certs\") 
pod \"router-default-5444994796-z9x4d\" (UID: \"1cf7eed9-6c3d-4721-bc4d-bc58044750e0\") " pod="openshift-ingress/router-default-5444994796-z9x4d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.271801 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.282586 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a6356f84-ed83-468e-b825-808d0aa2c7d4-console-serving-cert\") pod \"console-f9d7485db-2fdtt\" (UID: \"a6356f84-ed83-468e-b825-808d0aa2c7d4\") " pod="openshift-console/console-f9d7485db-2fdtt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.292745 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.296301 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a6356f84-ed83-468e-b825-808d0aa2c7d4-console-config\") pod \"console-f9d7485db-2fdtt\" (UID: \"a6356f84-ed83-468e-b825-808d0aa2c7d4\") " pod="openshift-console/console-f9d7485db-2fdtt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.312287 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.331856 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.338729 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a6356f84-ed83-468e-b825-808d0aa2c7d4-console-oauth-config\") pod \"console-f9d7485db-2fdtt\" (UID: \"a6356f84-ed83-468e-b825-808d0aa2c7d4\") " pod="openshift-console/console-f9d7485db-2fdtt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.351849 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.373311 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.392733 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.403472 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/90f39ea3-632a-4845-b3b9-6ed24a762baa-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-vcq2q\" (UID: \"90f39ea3-632a-4845-b3b9-6ed24a762baa\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vcq2q" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.412893 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.416985 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90f39ea3-632a-4845-b3b9-6ed24a762baa-config\") pod \"kube-apiserver-operator-766d6c64bb-vcq2q\" (UID: \"90f39ea3-632a-4845-b3b9-6ed24a762baa\") " 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vcq2q" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.432567 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.443509 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2321c1dc-0a76-4b24-aed3-cd67eaf23486-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-2glr6\" (UID: \"2321c1dc-0a76-4b24-aed3-cd67eaf23486\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2glr6" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.455335 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.472054 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.492089 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.500053 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2321c1dc-0a76-4b24-aed3-cd67eaf23486-config\") pod \"kube-controller-manager-operator-78b949d7b-2glr6\" (UID: \"2321c1dc-0a76-4b24-aed3-cd67eaf23486\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2glr6" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.512123 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.532894 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.552390 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.571503 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.583558 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b-serving-cert\") pod \"etcd-operator-b45778765-q9s2c\" (UID: \"8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q9s2c" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.592055 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.603377 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b-etcd-client\") pod \"etcd-operator-b45778765-q9s2c\" (UID: \"8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q9s2c" Jan 22 
00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.611775 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.633471 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.652392 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.658203 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.658288 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.659713 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b-config\") pod \"etcd-operator-b45778765-q9s2c\" (UID: \"8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q9s2c" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.671895 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.692591 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.696886 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b-etcd-ca\") pod \"etcd-operator-b45778765-q9s2c\" (UID: \"8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q9s2c" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.712090 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.732345 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.738039 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b-etcd-service-ca\") pod \"etcd-operator-b45778765-q9s2c\" (UID: \"8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q9s2c" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.751790 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.772623 4829 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.794712 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.832005 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.838503 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/db0366e2-9560-4f1a-949c-66d4ddc09b89-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-cvqvz\" (UID: \"db0366e2-9560-4f1a-949c-66d4ddc09b89\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cvqvz" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.853234 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.871968 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.892360 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.898498 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/684b4808-40a6-495b-a8cf-7d48d54982bb-proxy-tls\") pod \"machine-config-controller-84d6567774-t695d\" (UID: \"684b4808-40a6-495b-a8cf-7d48d54982bb\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t695d" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.912136 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.918087 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/7fa648de-a4e5-4e1d-8bbb-d84d5409f0b2-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-6l5hd\" (UID: \"7fa648de-a4e5-4e1d-8bbb-d84d5409f0b2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-6l5hd" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.932218 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.951206 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.972339 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 22 00:09:34 crc kubenswrapper[4829]: I0122 00:09:34.992386 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.010861 4829 request.go:700] Waited for 1.014721753s due to client-side throttling, not priority and fairness, request: 
GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/configmaps?fieldSelector=metadata.name%3Dopenshift-service-ca.crt&limit=500&resourceVersion=0 Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.012284 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.032729 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.041831 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/bd5d245d-600c-4790-89b2-3867c9e6279f-profile-collector-cert\") pod \"catalog-operator-68c6474976-5j8nc\" (UID: \"bd5d245d-600c-4790-89b2-3867c9e6279f\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5j8nc" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.053419 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.072535 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.084833 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/bd5d245d-600c-4790-89b2-3867c9e6279f-srv-cert\") pod \"catalog-operator-68c6474976-5j8nc\" (UID: \"bd5d245d-600c-4790-89b2-3867c9e6279f\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5j8nc" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.099832 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.112226 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.119265 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/3127e466-6f03-48c9-8d8b-6de53678192c-images\") pod \"machine-config-operator-74547568cd-nld4t\" (UID: \"3127e466-6f03-48c9-8d8b-6de53678192c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nld4t" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.132893 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.145224 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3127e466-6f03-48c9-8d8b-6de53678192c-proxy-tls\") pod \"machine-config-operator-74547568cd-nld4t\" (UID: \"3127e466-6f03-48c9-8d8b-6de53678192c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nld4t" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.153150 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 22 00:09:35 crc kubenswrapper[4829]: E0122 00:09:35.158628 4829 secret.go:188] Couldn't get secret 
openshift-operator-lifecycle-manager/packageserver-service-cert: failed to sync secret cache: timed out waiting for the condition Jan 22 00:09:35 crc kubenswrapper[4829]: E0122 00:09:35.158702 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bae421e2-2b7d-465e-baeb-9cdca1f68dc3-apiservice-cert podName:bae421e2-2b7d-465e-baeb-9cdca1f68dc3 nodeName:}" failed. No retries permitted until 2026-01-22 00:09:35.658681089 +0000 UTC m=+153.694923001 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "apiservice-cert" (UniqueName: "kubernetes.io/secret/bae421e2-2b7d-465e-baeb-9cdca1f68dc3-apiservice-cert") pod "packageserver-d55dfcdfc-kzt7t" (UID: "bae421e2-2b7d-465e-baeb-9cdca1f68dc3") : failed to sync secret cache: timed out waiting for the condition Jan 22 00:09:35 crc kubenswrapper[4829]: E0122 00:09:35.160674 4829 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/packageserver-service-cert: failed to sync secret cache: timed out waiting for the condition Jan 22 00:09:35 crc kubenswrapper[4829]: E0122 00:09:35.160781 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bae421e2-2b7d-465e-baeb-9cdca1f68dc3-webhook-cert podName:bae421e2-2b7d-465e-baeb-9cdca1f68dc3 nodeName:}" failed. No retries permitted until 2026-01-22 00:09:35.660756464 +0000 UTC m=+153.696998406 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-cert" (UniqueName: "kubernetes.io/secret/bae421e2-2b7d-465e-baeb-9cdca1f68dc3-webhook-cert") pod "packageserver-d55dfcdfc-kzt7t" (UID: "bae421e2-2b7d-465e-baeb-9cdca1f68dc3") : failed to sync secret cache: timed out waiting for the condition Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.172730 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.191847 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.231737 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.252408 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.286355 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.292359 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.312309 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.332294 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.352677 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.371714 4829 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-service-ca"/"signing-cabundle" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.393128 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.411703 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.433199 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.452635 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.472182 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.492388 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.513160 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.532607 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.552648 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.573006 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.592066 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.632433 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.633300 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.652496 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.671938 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.687728 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bae421e2-2b7d-465e-baeb-9cdca1f68dc3-apiservice-cert\") pod \"packageserver-d55dfcdfc-kzt7t\" (UID: \"bae421e2-2b7d-465e-baeb-9cdca1f68dc3\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kzt7t" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.688433 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bae421e2-2b7d-465e-baeb-9cdca1f68dc3-webhook-cert\") pod \"packageserver-d55dfcdfc-kzt7t\" (UID: \"bae421e2-2b7d-465e-baeb-9cdca1f68dc3\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kzt7t" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.694150 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bae421e2-2b7d-465e-baeb-9cdca1f68dc3-apiservice-cert\") pod \"packageserver-d55dfcdfc-kzt7t\" (UID: \"bae421e2-2b7d-465e-baeb-9cdca1f68dc3\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kzt7t" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.700701 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bae421e2-2b7d-465e-baeb-9cdca1f68dc3-webhook-cert\") pod \"packageserver-d55dfcdfc-kzt7t\" (UID: \"bae421e2-2b7d-465e-baeb-9cdca1f68dc3\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kzt7t" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.721697 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7n75\" (UniqueName: \"kubernetes.io/projected/fc463c00-fc43-4cdc-8ff0-a5026aba9539-kube-api-access-j7n75\") pod \"route-controller-manager-6576b87f9c-rknpp\" (UID: \"fc463c00-fc43-4cdc-8ff0-a5026aba9539\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.728144 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/38a9a23b-efd4-452d-b4ee-8e5205b59cd5-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-7vqjh\" (UID: \"38a9a23b-efd4-452d-b4ee-8e5205b59cd5\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7vqjh" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.752314 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.752664 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xb4gv\" (UniqueName: \"kubernetes.io/projected/fb1dda78-6284-441f-9239-1e9f81282032-kube-api-access-xb4gv\") pod \"image-pruner-29484000-7gpqp\" (UID: \"fb1dda78-6284-441f-9239-1e9f81282032\") " pod="openshift-image-registry/image-pruner-29484000-7gpqp" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.772718 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.791617 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.842261 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlq6g\" (UniqueName: \"kubernetes.io/projected/de89451c-cbc7-401d-ab19-f4ea8916fcb5-kube-api-access-hlq6g\") pod \"oauth-openshift-558db77b4-26xkj\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.849092 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwph9\" (UniqueName: 
\"kubernetes.io/projected/38a9a23b-efd4-452d-b4ee-8e5205b59cd5-kube-api-access-hwph9\") pod \"cluster-image-registry-operator-dc59b4c8b-7vqjh\" (UID: \"38a9a23b-efd4-452d-b4ee-8e5205b59cd5\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7vqjh" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.868906 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nf9rw\" (UniqueName: \"kubernetes.io/projected/d2b61594-3e3c-4445-9308-436bd10952c0-kube-api-access-nf9rw\") pod \"authentication-operator-69f744f599-wgx7d\" (UID: \"d2b61594-3e3c-4445-9308-436bd10952c0\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-wgx7d" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.900891 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfcwq\" (UniqueName: \"kubernetes.io/projected/72778ee0-ec95-4ab0-867c-1997b47449f5-kube-api-access-lfcwq\") pod \"apiserver-76f77b778f-gwk42\" (UID: \"72778ee0-ec95-4ab0-867c-1997b47449f5\") " pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.906632 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4g8h8\" (UniqueName: \"kubernetes.io/projected/2e96a180-475f-4cdc-a89e-e0e61dbcbe53-kube-api-access-4g8h8\") pod \"apiserver-7bbb656c7d-8dsgz\" (UID: \"2e96a180-475f-4cdc-a89e-e0e61dbcbe53\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.928021 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.937174 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njjvv\" (UniqueName: \"kubernetes.io/projected/0e60009d-5985-47f9-b164-32cf604c23fa-kube-api-access-njjvv\") pod \"controller-manager-879f6c89f-q9d5h\" (UID: \"0e60009d-5985-47f9-b164-32cf604c23fa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.937915 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-pruner-29484000-7gpqp" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.948204 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7vqjh" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.961292 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-779kf\" (UniqueName: \"kubernetes.io/projected/2594295b-8073-45b0-8006-f7276c580e6d-kube-api-access-779kf\") pod \"console-operator-58897d9998-dj87x\" (UID: \"2594295b-8073-45b0-8006-f7276c580e6d\") " pod="openshift-console-operator/console-operator-58897d9998-dj87x" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.966063 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.967921 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8b7bb\" (UniqueName: \"kubernetes.io/projected/ffe3f4a6-a78a-4a5d-bbdd-4bda32cf8d41-kube-api-access-8b7bb\") pod \"machine-approver-56656f9798-dj69l\" (UID: \"ffe3f4a6-a78a-4a5d-bbdd-4bda32cf8d41\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dj69l" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.988828 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbmp5\" (UniqueName: \"kubernetes.io/projected/7f619c30-40fb-46a4-956e-366f2192703e-kube-api-access-kbmp5\") pod \"openshift-config-operator-7777fb866f-c47sd\" (UID: \"7f619c30-40fb-46a4-956e-366f2192703e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-c47sd" Jan 22 00:09:35 crc kubenswrapper[4829]: I0122 00:09:35.998104 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.015282 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mt8r8\" (UniqueName: \"kubernetes.io/projected/d1166d09-4d81-47db-803f-316f64bac8a7-kube-api-access-mt8r8\") pod \"openshift-apiserver-operator-796bbdcf4f-vh68c\" (UID: \"d1166d09-4d81-47db-803f-316f64bac8a7\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vh68c" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.016509 4829 request.go:700] Waited for 1.944309424s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/serviceaccounts/default/token Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.019989 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.031567 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvb9f\" (UniqueName: \"kubernetes.io/projected/9447fd29-4eae-4299-b266-1f5236931aee-kube-api-access-bvb9f\") pod \"downloads-7954f5f757-8tq29\" (UID: \"9447fd29-4eae-4299-b266-1f5236931aee\") " pod="openshift-console/downloads-7954f5f757-8tq29" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.066896 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7r99\" (UniqueName: \"kubernetes.io/projected/2ef479a2-183c-40f8-8bd7-e974de5a5305-kube-api-access-m7r99\") pod \"cluster-samples-operator-665b6dd947-jqdzd\" (UID: \"2ef479a2-183c-40f8-8bd7-e974de5a5305\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jqdzd" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.083008 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-dj87x" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.085624 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkmpv\" (UniqueName: \"kubernetes.io/projected/1cf7eed9-6c3d-4721-bc4d-bc58044750e0-kube-api-access-xkmpv\") pod \"router-default-5444994796-z9x4d\" (UID: \"1cf7eed9-6c3d-4721-bc4d-bc58044750e0\") " pod="openshift-ingress/router-default-5444994796-z9x4d" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.091716 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6689\" (UniqueName: \"kubernetes.io/projected/dad2b02a-4000-44b8-acad-460b5a394c42-kube-api-access-s6689\") pod \"migrator-59844c95c7-zkmjd\" (UID: \"dad2b02a-4000-44b8-acad-460b5a394c42\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zkmjd" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.095506 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dj69l" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.104508 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87dhn\" (UniqueName: \"kubernetes.io/projected/a6356f84-ed83-468e-b825-808d0aa2c7d4-kube-api-access-87dhn\") pod \"console-f9d7485db-2fdtt\" (UID: \"a6356f84-ed83-468e-b825-808d0aa2c7d4\") " pod="openshift-console/console-f9d7485db-2fdtt" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.124873 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zkmjd" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.127832 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jqdzd" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.128363 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.129465 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjmz5\" (UniqueName: \"kubernetes.io/projected/3127e466-6f03-48c9-8d8b-6de53678192c-kube-api-access-fjmz5\") pod \"machine-config-operator-74547568cd-nld4t\" (UID: \"3127e466-6f03-48c9-8d8b-6de53678192c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nld4t" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.134070 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-8tq29" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.144076 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-wgx7d" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.155317 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nld4t" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.162451 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2321c1dc-0a76-4b24-aed3-cd67eaf23486-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-2glr6\" (UID: \"2321c1dc-0a76-4b24-aed3-cd67eaf23486\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2glr6" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.256435 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-c47sd" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.259696 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pn6t7\" (UniqueName: \"kubernetes.io/projected/81acce8c-d724-494b-bdfd-df88546f6ac6-kube-api-access-pn6t7\") pod \"openshift-controller-manager-operator-756b6f6bc6-n7mhk\" (UID: \"81acce8c-d724-494b-bdfd-df88546f6ac6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-n7mhk" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.283290 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vh68c" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.289579 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hj2lg\" (UniqueName: \"kubernetes.io/projected/7fa648de-a4e5-4e1d-8bbb-d84d5409f0b2-kube-api-access-hj2lg\") pod \"multus-admission-controller-857f4d67dd-6l5hd\" (UID: \"7fa648de-a4e5-4e1d-8bbb-d84d5409f0b2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-6l5hd" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.293050 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6swsg\" (UniqueName: \"kubernetes.io/projected/684b4808-40a6-495b-a8cf-7d48d54982bb-kube-api-access-6swsg\") pod \"machine-config-controller-84d6567774-t695d\" (UID: \"684b4808-40a6-495b-a8cf-7d48d54982bb\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t695d" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.293457 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/90f39ea3-632a-4845-b3b9-6ed24a762baa-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-vcq2q\" (UID: \"90f39ea3-632a-4845-b3b9-6ed24a762baa\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vcq2q" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.295252 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpnpt\" (UniqueName: \"kubernetes.io/projected/bae421e2-2b7d-465e-baeb-9cdca1f68dc3-kube-api-access-zpnpt\") pod \"packageserver-d55dfcdfc-kzt7t\" (UID: \"bae421e2-2b7d-465e-baeb-9cdca1f68dc3\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kzt7t" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.317970 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tz99n\" (UniqueName: \"kubernetes.io/projected/bd5d245d-600c-4790-89b2-3867c9e6279f-kube-api-access-tz99n\") pod \"catalog-operator-68c6474976-5j8nc\" (UID: 
\"bd5d245d-600c-4790-89b2-3867c9e6279f\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5j8nc" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.319627 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-n7mhk" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.323739 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f5d785d5-3f71-495f-99f0-fd89b646aec9-bound-sa-token\") pod \"ingress-operator-5b745b69d9-sg4kz\" (UID: \"f5d785d5-3f71-495f-99f0-fd89b646aec9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sg4kz" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.329183 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-2fdtt" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.330830 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-z9x4d" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.340529 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txw9z\" (UniqueName: \"kubernetes.io/projected/8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b-kube-api-access-txw9z\") pod \"etcd-operator-b45778765-q9s2c\" (UID: \"8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q9s2c" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.343880 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5hfp\" (UniqueName: \"kubernetes.io/projected/52038e8d-9971-49b4-86fb-8062c0eea326-kube-api-access-g5hfp\") pod \"dns-operator-744455d44c-v546c\" (UID: \"52038e8d-9971-49b4-86fb-8062c0eea326\") " pod="openshift-dns-operator/dns-operator-744455d44c-v546c" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.345121 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vcq2q" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.349901 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhq4m\" (UniqueName: \"kubernetes.io/projected/f5d785d5-3f71-495f-99f0-fd89b646aec9-kube-api-access-zhq4m\") pod \"ingress-operator-5b745b69d9-sg4kz\" (UID: \"f5d785d5-3f71-495f-99f0-fd89b646aec9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sg4kz" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.366790 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2glr6" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.369508 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8skhl\" (UniqueName: \"kubernetes.io/projected/db0366e2-9560-4f1a-949c-66d4ddc09b89-kube-api-access-8skhl\") pod \"control-plane-machine-set-operator-78cbb6b69f-cvqvz\" (UID: \"db0366e2-9560-4f1a-949c-66d4ddc09b89\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cvqvz" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.371893 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-q9s2c" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.388916 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4vfg\" (UniqueName: \"kubernetes.io/projected/3916e963-6bce-4316-b02e-98b5565e8615-kube-api-access-w4vfg\") pod \"machine-api-operator-5694c8668f-nxg4z\" (UID: \"3916e963-6bce-4316-b02e-98b5565e8615\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nxg4z" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.391897 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.396201 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cvqvz" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.404238 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t695d" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.412095 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-6l5hd" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.412808 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.582889 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kzt7t" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.583753 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5j8nc" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.588106 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sg4kz" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.595638 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dj69l" event={"ID":"ffe3f4a6-a78a-4a5d-bbdd-4bda32cf8d41","Type":"ContainerStarted","Data":"15015f1f8b9ba7d3338b95056f59d6729556bc857ebf5b622720adc346ecd170"} Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.596659 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-z9x4d" event={"ID":"1cf7eed9-6c3d-4721-bc4d-bc58044750e0","Type":"ContainerStarted","Data":"2f8eaa22aa709610ca6b20f67233c3e6291fadea26b724b64f285529a1fd2efb"} Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.599093 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-nxg4z" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.607157 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-v546c" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.617948 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.617983 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.618201 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.618206 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.618421 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.618586 4829 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.618686 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.623037 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.690428 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-installation-pull-secrets\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.690485 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-ca-trust-extracted\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.690590 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1ecf6e43-b0bb-454a-9165-2e5e44ddba3c-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6bgqw\" (UID: \"1ecf6e43-b0bb-454a-9165-2e5e44ddba3c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6bgqw" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.690621 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-trusted-ca\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.690755 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/1ecf6e43-b0bb-454a-9165-2e5e44ddba3c-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6bgqw\" (UID: \"1ecf6e43-b0bb-454a-9165-2e5e44ddba3c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6bgqw" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.690845 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-registry-certificates\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.690952 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wwn2\" (UniqueName: \"kubernetes.io/projected/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-kube-api-access-9wwn2\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.690988 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ecf6e43-b0bb-454a-9165-2e5e44ddba3c-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6bgqw\" (UID: \"1ecf6e43-b0bb-454a-9165-2e5e44ddba3c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6bgqw" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.691043 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-registry-tls\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.691079 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-bound-sa-token\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.691131 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:36 crc kubenswrapper[4829]: E0122 00:09:36.691646 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:37.191634078 +0000 UTC m=+155.227875990 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.797721 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.797943 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-registry-tls\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.797963 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-bound-sa-token\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.797986 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/562761b1-7f83-4d7a-b933-952fd9ad9963-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-vdnc9\" (UID: \"562761b1-7f83-4d7a-b933-952fd9ad9963\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vdnc9" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.798023 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqhpc\" (UniqueName: \"kubernetes.io/projected/c91fb8a4-d4a3-4782-94f2-3d6eb06221d9-kube-api-access-gqhpc\") pod \"collect-profiles-29484000-n7tmf\" (UID: \"c91fb8a4-d4a3-4782-94f2-3d6eb06221d9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484000-n7tmf" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.798038 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4e86e71a-0c33-41b1-854d-0ba4d03af4e6-config-volume\") pod \"dns-default-ht6jq\" (UID: \"4e86e71a-0c33-41b1-854d-0ba4d03af4e6\") " pod="openshift-dns/dns-default-ht6jq" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.798056 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c91fb8a4-d4a3-4782-94f2-3d6eb06221d9-secret-volume\") pod \"collect-profiles-29484000-n7tmf\" (UID: \"c91fb8a4-d4a3-4782-94f2-3d6eb06221d9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484000-n7tmf" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.798093 4829 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/db154a27-c1c5-498a-8184-7264c954bb47-profile-collector-cert\") pod \"olm-operator-6b444d44fb-gfvpq\" (UID: \"db154a27-c1c5-498a-8184-7264c954bb47\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gfvpq" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.798117 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/a660b566-870f-40b9-a790-6e6f2baa1e90-registration-dir\") pod \"csi-hostpathplugin-q2gtb\" (UID: \"a660b566-870f-40b9-a790-6e6f2baa1e90\") " pod="hostpath-provisioner/csi-hostpathplugin-q2gtb" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.798134 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/db154a27-c1c5-498a-8184-7264c954bb47-srv-cert\") pod \"olm-operator-6b444d44fb-gfvpq\" (UID: \"db154a27-c1c5-498a-8184-7264c954bb47\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gfvpq" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.798296 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/a660b566-870f-40b9-a790-6e6f2baa1e90-plugins-dir\") pod \"csi-hostpathplugin-q2gtb\" (UID: \"a660b566-870f-40b9-a790-6e6f2baa1e90\") " pod="hostpath-provisioner/csi-hostpathplugin-q2gtb" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.798313 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/3256e943-667b-4f8d-a732-6a1de0e9d6e4-node-bootstrap-token\") pod \"machine-config-server-cbmj7\" (UID: \"3256e943-667b-4f8d-a732-6a1de0e9d6e4\") " pod="openshift-machine-config-operator/machine-config-server-cbmj7" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.798347 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-installation-pull-secrets\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.798364 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxrpq\" (UniqueName: \"kubernetes.io/projected/f0f51904-83ab-43d1-87ef-8d7efbe1a6c8-kube-api-access-gxrpq\") pod \"service-ca-9c57cc56f-qxrp6\" (UID: \"f0f51904-83ab-43d1-87ef-8d7efbe1a6c8\") " pod="openshift-service-ca/service-ca-9c57cc56f-qxrp6" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.798407 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/24923fa2-e2a6-408a-a342-6792e1baf637-serving-cert\") pod \"service-ca-operator-777779d784-s5bvx\" (UID: \"24923fa2-e2a6-408a-a342-6792e1baf637\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-s5bvx" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.798443 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: 
\"kubernetes.io/empty-dir/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-ca-trust-extracted\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.798461 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/f0f51904-83ab-43d1-87ef-8d7efbe1a6c8-signing-key\") pod \"service-ca-9c57cc56f-qxrp6\" (UID: \"f0f51904-83ab-43d1-87ef-8d7efbe1a6c8\") " pod="openshift-service-ca/service-ca-9c57cc56f-qxrp6" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.798487 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/a660b566-870f-40b9-a790-6e6f2baa1e90-mountpoint-dir\") pod \"csi-hostpathplugin-q2gtb\" (UID: \"a660b566-870f-40b9-a790-6e6f2baa1e90\") " pod="hostpath-provisioner/csi-hostpathplugin-q2gtb" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.798564 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/a660b566-870f-40b9-a790-6e6f2baa1e90-csi-data-dir\") pod \"csi-hostpathplugin-q2gtb\" (UID: \"a660b566-870f-40b9-a790-6e6f2baa1e90\") " pod="hostpath-provisioner/csi-hostpathplugin-q2gtb" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.798582 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/3256e943-667b-4f8d-a732-6a1de0e9d6e4-certs\") pod \"machine-config-server-cbmj7\" (UID: \"3256e943-667b-4f8d-a732-6a1de0e9d6e4\") " pod="openshift-machine-config-operator/machine-config-server-cbmj7" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.798625 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1ecf6e43-b0bb-454a-9165-2e5e44ddba3c-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6bgqw\" (UID: \"1ecf6e43-b0bb-454a-9165-2e5e44ddba3c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6bgqw" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.798641 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c91fb8a4-d4a3-4782-94f2-3d6eb06221d9-config-volume\") pod \"collect-profiles-29484000-n7tmf\" (UID: \"c91fb8a4-d4a3-4782-94f2-3d6eb06221d9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484000-n7tmf" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.798661 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5354ee45-800d-4a08-be4e-64c5ae44811e-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-2g8pk\" (UID: \"5354ee45-800d-4a08-be4e-64c5ae44811e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2g8pk" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.798708 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgddd\" (UniqueName: \"kubernetes.io/projected/4e86e71a-0c33-41b1-854d-0ba4d03af4e6-kube-api-access-xgddd\") pod 
\"dns-default-ht6jq\" (UID: \"4e86e71a-0c33-41b1-854d-0ba4d03af4e6\") " pod="openshift-dns/dns-default-ht6jq" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.798735 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-trusted-ca\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.798753 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/f0f51904-83ab-43d1-87ef-8d7efbe1a6c8-signing-cabundle\") pod \"service-ca-9c57cc56f-qxrp6\" (UID: \"f0f51904-83ab-43d1-87ef-8d7efbe1a6c8\") " pod="openshift-service-ca/service-ca-9c57cc56f-qxrp6" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.798769 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5354ee45-800d-4a08-be4e-64c5ae44811e-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-2g8pk\" (UID: \"5354ee45-800d-4a08-be4e-64c5ae44811e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2g8pk" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.798794 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/a660b566-870f-40b9-a790-6e6f2baa1e90-socket-dir\") pod \"csi-hostpathplugin-q2gtb\" (UID: \"a660b566-870f-40b9-a790-6e6f2baa1e90\") " pod="hostpath-provisioner/csi-hostpathplugin-q2gtb" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.798842 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vccdk\" (UniqueName: \"kubernetes.io/projected/5354ee45-800d-4a08-be4e-64c5ae44811e-kube-api-access-vccdk\") pod \"kube-storage-version-migrator-operator-b67b599dd-2g8pk\" (UID: \"5354ee45-800d-4a08-be4e-64c5ae44811e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2g8pk" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.798878 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2dcde760-e466-49aa-a092-1385876196ef-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-w7dnn\" (UID: \"2dcde760-e466-49aa-a092-1385876196ef\") " pod="openshift-marketplace/marketplace-operator-79b997595-w7dnn" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.798907 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1ecf6e43-b0bb-454a-9165-2e5e44ddba3c-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6bgqw\" (UID: \"1ecf6e43-b0bb-454a-9165-2e5e44ddba3c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6bgqw" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.799013 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9hljt\" (UniqueName: \"kubernetes.io/projected/562761b1-7f83-4d7a-b933-952fd9ad9963-kube-api-access-9hljt\") pod 
\"package-server-manager-789f6589d5-vdnc9\" (UID: \"562761b1-7f83-4d7a-b933-952fd9ad9963\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vdnc9" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.799051 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/24923fa2-e2a6-408a-a342-6792e1baf637-config\") pod \"service-ca-operator-777779d784-s5bvx\" (UID: \"24923fa2-e2a6-408a-a342-6792e1baf637\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-s5bvx" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.799066 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6db38a65-0f46-4da8-8984-f6966500afc4-cert\") pod \"ingress-canary-9pmcq\" (UID: \"6db38a65-0f46-4da8-8984-f6966500afc4\") " pod="openshift-ingress-canary/ingress-canary-9pmcq" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.799082 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjtwq\" (UniqueName: \"kubernetes.io/projected/6db38a65-0f46-4da8-8984-f6966500afc4-kube-api-access-bjtwq\") pod \"ingress-canary-9pmcq\" (UID: \"6db38a65-0f46-4da8-8984-f6966500afc4\") " pod="openshift-ingress-canary/ingress-canary-9pmcq" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.799193 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2dcde760-e466-49aa-a092-1385876196ef-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-w7dnn\" (UID: \"2dcde760-e466-49aa-a092-1385876196ef\") " pod="openshift-marketplace/marketplace-operator-79b997595-w7dnn" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.799254 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-registry-certificates\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.799275 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfhw9\" (UniqueName: \"kubernetes.io/projected/2dcde760-e466-49aa-a092-1385876196ef-kube-api-access-tfhw9\") pod \"marketplace-operator-79b997595-w7dnn\" (UID: \"2dcde760-e466-49aa-a092-1385876196ef\") " pod="openshift-marketplace/marketplace-operator-79b997595-w7dnn" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.799294 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwqlf\" (UniqueName: \"kubernetes.io/projected/24923fa2-e2a6-408a-a342-6792e1baf637-kube-api-access-jwqlf\") pod \"service-ca-operator-777779d784-s5bvx\" (UID: \"24923fa2-e2a6-408a-a342-6792e1baf637\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-s5bvx" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.799325 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmdth\" (UniqueName: \"kubernetes.io/projected/a660b566-870f-40b9-a790-6e6f2baa1e90-kube-api-access-nmdth\") pod \"csi-hostpathplugin-q2gtb\" (UID: 
\"a660b566-870f-40b9-a790-6e6f2baa1e90\") " pod="hostpath-provisioner/csi-hostpathplugin-q2gtb" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.799390 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4e86e71a-0c33-41b1-854d-0ba4d03af4e6-metrics-tls\") pod \"dns-default-ht6jq\" (UID: \"4e86e71a-0c33-41b1-854d-0ba4d03af4e6\") " pod="openshift-dns/dns-default-ht6jq" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.799485 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7867n\" (UniqueName: \"kubernetes.io/projected/3256e943-667b-4f8d-a732-6a1de0e9d6e4-kube-api-access-7867n\") pod \"machine-config-server-cbmj7\" (UID: \"3256e943-667b-4f8d-a732-6a1de0e9d6e4\") " pod="openshift-machine-config-operator/machine-config-server-cbmj7" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.799550 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wwn2\" (UniqueName: \"kubernetes.io/projected/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-kube-api-access-9wwn2\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.799572 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ecf6e43-b0bb-454a-9165-2e5e44ddba3c-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6bgqw\" (UID: \"1ecf6e43-b0bb-454a-9165-2e5e44ddba3c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6bgqw" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.799590 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wx6j4\" (UniqueName: \"kubernetes.io/projected/db154a27-c1c5-498a-8184-7264c954bb47-kube-api-access-wx6j4\") pod \"olm-operator-6b444d44fb-gfvpq\" (UID: \"db154a27-c1c5-498a-8184-7264c954bb47\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gfvpq" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.803852 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-registry-certificates\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.811312 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-registry-tls\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.818452 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-trusted-ca\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.930381 4829 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-ca-trust-extracted\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.947149 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ecf6e43-b0bb-454a-9165-2e5e44ddba3c-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6bgqw\" (UID: \"1ecf6e43-b0bb-454a-9165-2e5e44ddba3c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6bgqw" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.947191 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1ecf6e43-b0bb-454a-9165-2e5e44ddba3c-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6bgqw\" (UID: \"1ecf6e43-b0bb-454a-9165-2e5e44ddba3c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6bgqw" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.948476 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-bound-sa-token\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:36 crc kubenswrapper[4829]: E0122 00:09:36.958798 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:37.458768484 +0000 UTC m=+155.495010396 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.984488 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgddd\" (UniqueName: \"kubernetes.io/projected/4e86e71a-0c33-41b1-854d-0ba4d03af4e6-kube-api-access-xgddd\") pod \"dns-default-ht6jq\" (UID: \"4e86e71a-0c33-41b1-854d-0ba4d03af4e6\") " pod="openshift-dns/dns-default-ht6jq" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.986279 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/db154a27-c1c5-498a-8184-7264c954bb47-profile-collector-cert\") pod \"olm-operator-6b444d44fb-gfvpq\" (UID: \"db154a27-c1c5-498a-8184-7264c954bb47\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gfvpq" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.987192 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1ecf6e43-b0bb-454a-9165-2e5e44ddba3c-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6bgqw\" (UID: \"1ecf6e43-b0bb-454a-9165-2e5e44ddba3c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6bgqw" Jan 22 00:09:36 crc kubenswrapper[4829]: I0122 00:09:36.987981 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:36 crc kubenswrapper[4829]: E0122 00:09:36.988409 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:37.488389386 +0000 UTC m=+155.524631298 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.000715 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wwn2\" (UniqueName: \"kubernetes.io/projected/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-kube-api-access-9wwn2\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.002442 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-installation-pull-secrets\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.024959 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/db154a27-c1c5-498a-8184-7264c954bb47-profile-collector-cert\") pod \"olm-operator-6b444d44fb-gfvpq\" (UID: \"db154a27-c1c5-498a-8184-7264c954bb47\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gfvpq" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.042380 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgddd\" (UniqueName: \"kubernetes.io/projected/4e86e71a-0c33-41b1-854d-0ba4d03af4e6-kube-api-access-xgddd\") pod \"dns-default-ht6jq\" (UID: \"4e86e71a-0c33-41b1-854d-0ba4d03af4e6\") " pod="openshift-dns/dns-default-ht6jq" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.089310 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:37 crc kubenswrapper[4829]: E0122 00:09:37.089557 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:37.589513519 +0000 UTC m=+155.625755431 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.089634 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmdth\" (UniqueName: \"kubernetes.io/projected/a660b566-870f-40b9-a790-6e6f2baa1e90-kube-api-access-nmdth\") pod \"csi-hostpathplugin-q2gtb\" (UID: \"a660b566-870f-40b9-a790-6e6f2baa1e90\") " pod="hostpath-provisioner/csi-hostpathplugin-q2gtb" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.089659 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4e86e71a-0c33-41b1-854d-0ba4d03af4e6-metrics-tls\") pod \"dns-default-ht6jq\" (UID: \"4e86e71a-0c33-41b1-854d-0ba4d03af4e6\") " pod="openshift-dns/dns-default-ht6jq" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.089680 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7867n\" (UniqueName: \"kubernetes.io/projected/3256e943-667b-4f8d-a732-6a1de0e9d6e4-kube-api-access-7867n\") pod \"machine-config-server-cbmj7\" (UID: \"3256e943-667b-4f8d-a732-6a1de0e9d6e4\") " pod="openshift-machine-config-operator/machine-config-server-cbmj7" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.089697 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wx6j4\" (UniqueName: \"kubernetes.io/projected/db154a27-c1c5-498a-8184-7264c954bb47-kube-api-access-wx6j4\") pod \"olm-operator-6b444d44fb-gfvpq\" (UID: \"db154a27-c1c5-498a-8184-7264c954bb47\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gfvpq" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.089722 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqhpc\" (UniqueName: \"kubernetes.io/projected/c91fb8a4-d4a3-4782-94f2-3d6eb06221d9-kube-api-access-gqhpc\") pod \"collect-profiles-29484000-n7tmf\" (UID: \"c91fb8a4-d4a3-4782-94f2-3d6eb06221d9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484000-n7tmf" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.089738 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4e86e71a-0c33-41b1-854d-0ba4d03af4e6-config-volume\") pod \"dns-default-ht6jq\" (UID: \"4e86e71a-0c33-41b1-854d-0ba4d03af4e6\") " pod="openshift-dns/dns-default-ht6jq" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.089755 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/562761b1-7f83-4d7a-b933-952fd9ad9963-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-vdnc9\" (UID: \"562761b1-7f83-4d7a-b933-952fd9ad9963\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vdnc9" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.089780 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: 
\"kubernetes.io/secret/c91fb8a4-d4a3-4782-94f2-3d6eb06221d9-secret-volume\") pod \"collect-profiles-29484000-n7tmf\" (UID: \"c91fb8a4-d4a3-4782-94f2-3d6eb06221d9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484000-n7tmf" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.089798 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/a660b566-870f-40b9-a790-6e6f2baa1e90-registration-dir\") pod \"csi-hostpathplugin-q2gtb\" (UID: \"a660b566-870f-40b9-a790-6e6f2baa1e90\") " pod="hostpath-provisioner/csi-hostpathplugin-q2gtb" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.089812 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/db154a27-c1c5-498a-8184-7264c954bb47-srv-cert\") pod \"olm-operator-6b444d44fb-gfvpq\" (UID: \"db154a27-c1c5-498a-8184-7264c954bb47\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gfvpq" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.089833 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.089863 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/a660b566-870f-40b9-a790-6e6f2baa1e90-plugins-dir\") pod \"csi-hostpathplugin-q2gtb\" (UID: \"a660b566-870f-40b9-a790-6e6f2baa1e90\") " pod="hostpath-provisioner/csi-hostpathplugin-q2gtb" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.089879 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/3256e943-667b-4f8d-a732-6a1de0e9d6e4-node-bootstrap-token\") pod \"machine-config-server-cbmj7\" (UID: \"3256e943-667b-4f8d-a732-6a1de0e9d6e4\") " pod="openshift-machine-config-operator/machine-config-server-cbmj7" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.089898 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxrpq\" (UniqueName: \"kubernetes.io/projected/f0f51904-83ab-43d1-87ef-8d7efbe1a6c8-kube-api-access-gxrpq\") pod \"service-ca-9c57cc56f-qxrp6\" (UID: \"f0f51904-83ab-43d1-87ef-8d7efbe1a6c8\") " pod="openshift-service-ca/service-ca-9c57cc56f-qxrp6" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.089916 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/24923fa2-e2a6-408a-a342-6792e1baf637-serving-cert\") pod \"service-ca-operator-777779d784-s5bvx\" (UID: \"24923fa2-e2a6-408a-a342-6792e1baf637\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-s5bvx" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.089932 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/f0f51904-83ab-43d1-87ef-8d7efbe1a6c8-signing-key\") pod \"service-ca-9c57cc56f-qxrp6\" (UID: \"f0f51904-83ab-43d1-87ef-8d7efbe1a6c8\") " pod="openshift-service-ca/service-ca-9c57cc56f-qxrp6" Jan 22 00:09:37 crc kubenswrapper[4829]: 
I0122 00:09:37.089950 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/a660b566-870f-40b9-a790-6e6f2baa1e90-mountpoint-dir\") pod \"csi-hostpathplugin-q2gtb\" (UID: \"a660b566-870f-40b9-a790-6e6f2baa1e90\") " pod="hostpath-provisioner/csi-hostpathplugin-q2gtb" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.089965 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/a660b566-870f-40b9-a790-6e6f2baa1e90-csi-data-dir\") pod \"csi-hostpathplugin-q2gtb\" (UID: \"a660b566-870f-40b9-a790-6e6f2baa1e90\") " pod="hostpath-provisioner/csi-hostpathplugin-q2gtb" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.089987 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/3256e943-667b-4f8d-a732-6a1de0e9d6e4-certs\") pod \"machine-config-server-cbmj7\" (UID: \"3256e943-667b-4f8d-a732-6a1de0e9d6e4\") " pod="openshift-machine-config-operator/machine-config-server-cbmj7" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.090004 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c91fb8a4-d4a3-4782-94f2-3d6eb06221d9-config-volume\") pod \"collect-profiles-29484000-n7tmf\" (UID: \"c91fb8a4-d4a3-4782-94f2-3d6eb06221d9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484000-n7tmf" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.090023 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5354ee45-800d-4a08-be4e-64c5ae44811e-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-2g8pk\" (UID: \"5354ee45-800d-4a08-be4e-64c5ae44811e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2g8pk" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.090043 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/f0f51904-83ab-43d1-87ef-8d7efbe1a6c8-signing-cabundle\") pod \"service-ca-9c57cc56f-qxrp6\" (UID: \"f0f51904-83ab-43d1-87ef-8d7efbe1a6c8\") " pod="openshift-service-ca/service-ca-9c57cc56f-qxrp6" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.090058 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5354ee45-800d-4a08-be4e-64c5ae44811e-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-2g8pk\" (UID: \"5354ee45-800d-4a08-be4e-64c5ae44811e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2g8pk" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.090075 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/a660b566-870f-40b9-a790-6e6f2baa1e90-socket-dir\") pod \"csi-hostpathplugin-q2gtb\" (UID: \"a660b566-870f-40b9-a790-6e6f2baa1e90\") " pod="hostpath-provisioner/csi-hostpathplugin-q2gtb" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.090094 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vccdk\" (UniqueName: \"kubernetes.io/projected/5354ee45-800d-4a08-be4e-64c5ae44811e-kube-api-access-vccdk\") pod 
\"kube-storage-version-migrator-operator-b67b599dd-2g8pk\" (UID: \"5354ee45-800d-4a08-be4e-64c5ae44811e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2g8pk" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.090110 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2dcde760-e466-49aa-a092-1385876196ef-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-w7dnn\" (UID: \"2dcde760-e466-49aa-a092-1385876196ef\") " pod="openshift-marketplace/marketplace-operator-79b997595-w7dnn" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.090135 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9hljt\" (UniqueName: \"kubernetes.io/projected/562761b1-7f83-4d7a-b933-952fd9ad9963-kube-api-access-9hljt\") pod \"package-server-manager-789f6589d5-vdnc9\" (UID: \"562761b1-7f83-4d7a-b933-952fd9ad9963\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vdnc9" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.090150 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6db38a65-0f46-4da8-8984-f6966500afc4-cert\") pod \"ingress-canary-9pmcq\" (UID: \"6db38a65-0f46-4da8-8984-f6966500afc4\") " pod="openshift-ingress-canary/ingress-canary-9pmcq" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.090166 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjtwq\" (UniqueName: \"kubernetes.io/projected/6db38a65-0f46-4da8-8984-f6966500afc4-kube-api-access-bjtwq\") pod \"ingress-canary-9pmcq\" (UID: \"6db38a65-0f46-4da8-8984-f6966500afc4\") " pod="openshift-ingress-canary/ingress-canary-9pmcq" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.090180 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/24923fa2-e2a6-408a-a342-6792e1baf637-config\") pod \"service-ca-operator-777779d784-s5bvx\" (UID: \"24923fa2-e2a6-408a-a342-6792e1baf637\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-s5bvx" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.090198 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2dcde760-e466-49aa-a092-1385876196ef-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-w7dnn\" (UID: \"2dcde760-e466-49aa-a092-1385876196ef\") " pod="openshift-marketplace/marketplace-operator-79b997595-w7dnn" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.090213 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwqlf\" (UniqueName: \"kubernetes.io/projected/24923fa2-e2a6-408a-a342-6792e1baf637-kube-api-access-jwqlf\") pod \"service-ca-operator-777779d784-s5bvx\" (UID: \"24923fa2-e2a6-408a-a342-6792e1baf637\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-s5bvx" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.090229 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfhw9\" (UniqueName: \"kubernetes.io/projected/2dcde760-e466-49aa-a092-1385876196ef-kube-api-access-tfhw9\") pod \"marketplace-operator-79b997595-w7dnn\" (UID: \"2dcde760-e466-49aa-a092-1385876196ef\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-w7dnn" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.090329 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/a660b566-870f-40b9-a790-6e6f2baa1e90-plugins-dir\") pod \"csi-hostpathplugin-q2gtb\" (UID: \"a660b566-870f-40b9-a790-6e6f2baa1e90\") " pod="hostpath-provisioner/csi-hostpathplugin-q2gtb" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.093276 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4e86e71a-0c33-41b1-854d-0ba4d03af4e6-metrics-tls\") pod \"dns-default-ht6jq\" (UID: \"4e86e71a-0c33-41b1-854d-0ba4d03af4e6\") " pod="openshift-dns/dns-default-ht6jq" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.094317 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4e86e71a-0c33-41b1-854d-0ba4d03af4e6-config-volume\") pod \"dns-default-ht6jq\" (UID: \"4e86e71a-0c33-41b1-854d-0ba4d03af4e6\") " pod="openshift-dns/dns-default-ht6jq" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.094799 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/a660b566-870f-40b9-a790-6e6f2baa1e90-registration-dir\") pod \"csi-hostpathplugin-q2gtb\" (UID: \"a660b566-870f-40b9-a790-6e6f2baa1e90\") " pod="hostpath-provisioner/csi-hostpathplugin-q2gtb" Jan 22 00:09:37 crc kubenswrapper[4829]: E0122 00:09:37.096924 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:37.596910842 +0000 UTC m=+155.633152764 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.097371 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/3256e943-667b-4f8d-a732-6a1de0e9d6e4-node-bootstrap-token\") pod \"machine-config-server-cbmj7\" (UID: \"3256e943-667b-4f8d-a732-6a1de0e9d6e4\") " pod="openshift-machine-config-operator/machine-config-server-cbmj7" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.097481 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/562761b1-7f83-4d7a-b933-952fd9ad9963-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-vdnc9\" (UID: \"562761b1-7f83-4d7a-b933-952fd9ad9963\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vdnc9" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.097516 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/a660b566-870f-40b9-a790-6e6f2baa1e90-socket-dir\") pod \"csi-hostpathplugin-q2gtb\" (UID: \"a660b566-870f-40b9-a790-6e6f2baa1e90\") " pod="hostpath-provisioner/csi-hostpathplugin-q2gtb" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.097608 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/a660b566-870f-40b9-a790-6e6f2baa1e90-csi-data-dir\") pod \"csi-hostpathplugin-q2gtb\" (UID: \"a660b566-870f-40b9-a790-6e6f2baa1e90\") " pod="hostpath-provisioner/csi-hostpathplugin-q2gtb" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.098745 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2dcde760-e466-49aa-a092-1385876196ef-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-w7dnn\" (UID: \"2dcde760-e466-49aa-a092-1385876196ef\") " pod="openshift-marketplace/marketplace-operator-79b997595-w7dnn" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.103679 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c91fb8a4-d4a3-4782-94f2-3d6eb06221d9-secret-volume\") pod \"collect-profiles-29484000-n7tmf\" (UID: \"c91fb8a4-d4a3-4782-94f2-3d6eb06221d9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484000-n7tmf" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.104650 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c91fb8a4-d4a3-4782-94f2-3d6eb06221d9-config-volume\") pod \"collect-profiles-29484000-n7tmf\" (UID: \"c91fb8a4-d4a3-4782-94f2-3d6eb06221d9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484000-n7tmf" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.105319 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/5354ee45-800d-4a08-be4e-64c5ae44811e-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-2g8pk\" (UID: \"5354ee45-800d-4a08-be4e-64c5ae44811e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2g8pk" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.105787 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/24923fa2-e2a6-408a-a342-6792e1baf637-config\") pod \"service-ca-operator-777779d784-s5bvx\" (UID: \"24923fa2-e2a6-408a-a342-6792e1baf637\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-s5bvx" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.105933 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/a660b566-870f-40b9-a790-6e6f2baa1e90-mountpoint-dir\") pod \"csi-hostpathplugin-q2gtb\" (UID: \"a660b566-870f-40b9-a790-6e6f2baa1e90\") " pod="hostpath-provisioner/csi-hostpathplugin-q2gtb" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.106514 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/f0f51904-83ab-43d1-87ef-8d7efbe1a6c8-signing-cabundle\") pod \"service-ca-9c57cc56f-qxrp6\" (UID: \"f0f51904-83ab-43d1-87ef-8d7efbe1a6c8\") " pod="openshift-service-ca/service-ca-9c57cc56f-qxrp6" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.108967 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2dcde760-e466-49aa-a092-1385876196ef-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-w7dnn\" (UID: \"2dcde760-e466-49aa-a092-1385876196ef\") " pod="openshift-marketplace/marketplace-operator-79b997595-w7dnn" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.109315 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/3256e943-667b-4f8d-a732-6a1de0e9d6e4-certs\") pod \"machine-config-server-cbmj7\" (UID: \"3256e943-667b-4f8d-a732-6a1de0e9d6e4\") " pod="openshift-machine-config-operator/machine-config-server-cbmj7" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.110206 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/f0f51904-83ab-43d1-87ef-8d7efbe1a6c8-signing-key\") pod \"service-ca-9c57cc56f-qxrp6\" (UID: \"f0f51904-83ab-43d1-87ef-8d7efbe1a6c8\") " pod="openshift-service-ca/service-ca-9c57cc56f-qxrp6" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.111525 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/24923fa2-e2a6-408a-a342-6792e1baf637-serving-cert\") pod \"service-ca-operator-777779d784-s5bvx\" (UID: \"24923fa2-e2a6-408a-a342-6792e1baf637\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-s5bvx" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.112065 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/db154a27-c1c5-498a-8184-7264c954bb47-srv-cert\") pod \"olm-operator-6b444d44fb-gfvpq\" (UID: \"db154a27-c1c5-498a-8184-7264c954bb47\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gfvpq" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.112457 4829 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5354ee45-800d-4a08-be4e-64c5ae44811e-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-2g8pk\" (UID: \"5354ee45-800d-4a08-be4e-64c5ae44811e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2g8pk" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.130395 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vccdk\" (UniqueName: \"kubernetes.io/projected/5354ee45-800d-4a08-be4e-64c5ae44811e-kube-api-access-vccdk\") pod \"kube-storage-version-migrator-operator-b67b599dd-2g8pk\" (UID: \"5354ee45-800d-4a08-be4e-64c5ae44811e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2g8pk" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.135369 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfhw9\" (UniqueName: \"kubernetes.io/projected/2dcde760-e466-49aa-a092-1385876196ef-kube-api-access-tfhw9\") pod \"marketplace-operator-79b997595-w7dnn\" (UID: \"2dcde760-e466-49aa-a092-1385876196ef\") " pod="openshift-marketplace/marketplace-operator-79b997595-w7dnn" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.136053 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmdth\" (UniqueName: \"kubernetes.io/projected/a660b566-870f-40b9-a790-6e6f2baa1e90-kube-api-access-nmdth\") pod \"csi-hostpathplugin-q2gtb\" (UID: \"a660b566-870f-40b9-a790-6e6f2baa1e90\") " pod="hostpath-provisioner/csi-hostpathplugin-q2gtb" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.137552 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6db38a65-0f46-4da8-8984-f6966500afc4-cert\") pod \"ingress-canary-9pmcq\" (UID: \"6db38a65-0f46-4da8-8984-f6966500afc4\") " pod="openshift-ingress-canary/ingress-canary-9pmcq" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.138149 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2g8pk" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.147712 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wx6j4\" (UniqueName: \"kubernetes.io/projected/db154a27-c1c5-498a-8184-7264c954bb47-kube-api-access-wx6j4\") pod \"olm-operator-6b444d44fb-gfvpq\" (UID: \"db154a27-c1c5-498a-8184-7264c954bb47\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gfvpq" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.147723 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7867n\" (UniqueName: \"kubernetes.io/projected/3256e943-667b-4f8d-a732-6a1de0e9d6e4-kube-api-access-7867n\") pod \"machine-config-server-cbmj7\" (UID: \"3256e943-667b-4f8d-a732-6a1de0e9d6e4\") " pod="openshift-machine-config-operator/machine-config-server-cbmj7" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.148165 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-cbmj7" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.151884 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxrpq\" (UniqueName: \"kubernetes.io/projected/f0f51904-83ab-43d1-87ef-8d7efbe1a6c8-kube-api-access-gxrpq\") pod \"service-ca-9c57cc56f-qxrp6\" (UID: \"f0f51904-83ab-43d1-87ef-8d7efbe1a6c8\") " pod="openshift-service-ca/service-ca-9c57cc56f-qxrp6" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.153749 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqhpc\" (UniqueName: \"kubernetes.io/projected/c91fb8a4-d4a3-4782-94f2-3d6eb06221d9-kube-api-access-gqhpc\") pod \"collect-profiles-29484000-n7tmf\" (UID: \"c91fb8a4-d4a3-4782-94f2-3d6eb06221d9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484000-n7tmf" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.157668 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9hljt\" (UniqueName: \"kubernetes.io/projected/562761b1-7f83-4d7a-b933-952fd9ad9963-kube-api-access-9hljt\") pod \"package-server-manager-789f6589d5-vdnc9\" (UID: \"562761b1-7f83-4d7a-b933-952fd9ad9963\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vdnc9" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.166773 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjtwq\" (UniqueName: \"kubernetes.io/projected/6db38a65-0f46-4da8-8984-f6966500afc4-kube-api-access-bjtwq\") pod \"ingress-canary-9pmcq\" (UID: \"6db38a65-0f46-4da8-8984-f6966500afc4\") " pod="openshift-ingress-canary/ingress-canary-9pmcq" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.178708 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-ht6jq" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.192673 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwqlf\" (UniqueName: \"kubernetes.io/projected/24923fa2-e2a6-408a-a342-6792e1baf637-kube-api-access-jwqlf\") pod \"service-ca-operator-777779d784-s5bvx\" (UID: \"24923fa2-e2a6-408a-a342-6792e1baf637\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-s5bvx" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.193265 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:37 crc kubenswrapper[4829]: E0122 00:09:37.193361 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:37.693339776 +0000 UTC m=+155.729581688 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.193573 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:37 crc kubenswrapper[4829]: E0122 00:09:37.193901 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:37.693892534 +0000 UTC m=+155.730134436 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.195394 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp"] Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.219759 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-q2gtb" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.282358 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6bgqw" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.298735 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-pruner-29484000-7gpqp"] Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.298939 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:37 crc kubenswrapper[4829]: E0122 00:09:37.299065 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:37.799021392 +0000 UTC m=+155.835263304 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.299302 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:37 crc kubenswrapper[4829]: E0122 00:09:37.299615 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:37.79960864 +0000 UTC m=+155.835850552 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.378117 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vdnc9" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.388389 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-w7dnn" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.396258 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-qxrp6" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.405171 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:37 crc kubenswrapper[4829]: E0122 00:09:37.405583 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:37.905565914 +0000 UTC m=+155.941807826 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.410684 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-s5bvx" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.421625 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gfvpq" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.438192 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484000-n7tmf" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.482850 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-9pmcq" Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.506094 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:37 crc kubenswrapper[4829]: E0122 00:09:37.506361 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:38.006350886 +0000 UTC m=+156.042592798 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.608031 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:37 crc kubenswrapper[4829]: E0122 00:09:37.608418 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:38.108400398 +0000 UTC m=+156.144642310 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.625959 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-cbmj7" event={"ID":"3256e943-667b-4f8d-a732-6a1de0e9d6e4","Type":"ContainerStarted","Data":"a946a706089b045335d624d5fae8b89dadc44f00d543240df7d20331335e29fc"} Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.652111 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dj69l" event={"ID":"ffe3f4a6-a78a-4a5d-bbdd-4bda32cf8d41","Type":"ContainerStarted","Data":"328ed11c5d859688f72311f2a348c3bca4532401ee56fdb6b325df0ee73c5e95"} Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.660997 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-z9x4d" event={"ID":"1cf7eed9-6c3d-4721-bc4d-bc58044750e0","Type":"ContainerStarted","Data":"dc85201a5b3d822ca8df22871b0bb31e4d389b073ac79db1a3a5bfc7ceb8409f"} Jan 22 00:09:37 crc kubenswrapper[4829]: I0122 00:09:37.727591 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:37 crc kubenswrapper[4829]: E0122 00:09:37.881497 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:38.381478841 +0000 UTC m=+156.417720823 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:37.982466 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:38 crc kubenswrapper[4829]: E0122 00:09:37.983253 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:38.483228093 +0000 UTC m=+156.519470005 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.106819 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:38 crc kubenswrapper[4829]: E0122 00:09:38.107364 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:38.607347769 +0000 UTC m=+156.643589681 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.226614 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:38 crc kubenswrapper[4829]: E0122 00:09:38.226801 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:38.726766987 +0000 UTC m=+156.763008899 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.227458 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:38 crc kubenswrapper[4829]: E0122 00:09:38.228805 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:38.72878888 +0000 UTC m=+156.765030792 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.333285 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-z9x4d" Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.333592 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:38 crc kubenswrapper[4829]: E0122 00:09:38.334073 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:38.834049663 +0000 UTC m=+156.870291575 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.381215 4829 patch_prober.go:28] interesting pod/router-default-5444994796-z9x4d container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.381267 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z9x4d" podUID="1cf7eed9-6c3d-4721-bc4d-bc58044750e0" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.437281 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7vqjh"] Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.438723 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:38 crc kubenswrapper[4829]: E0122 00:09:38.439271 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:38.939253493 +0000 UTC m=+156.975495405 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.539507 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:38 crc kubenswrapper[4829]: E0122 00:09:38.539930 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:39.039911571 +0000 UTC m=+157.076153483 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:38 crc kubenswrapper[4829]: W0122 00:09:38.604076 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod38a9a23b_efd4_452d_b4ee_8e5205b59cd5.slice/crio-fa2514674357600dfe2e1bc0d4120a291937ec08a5cdb93898a69f54ab2b4dec WatchSource:0}: Error finding container fa2514674357600dfe2e1bc0d4120a291937ec08a5cdb93898a69f54ab2b4dec: Status 404 returned error can't find the container with id fa2514674357600dfe2e1bc0d4120a291937ec08a5cdb93898a69f54ab2b4dec Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.641621 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:38 crc kubenswrapper[4829]: E0122 00:09:38.641949 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:39.141937522 +0000 UTC m=+157.178179434 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.654027 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-z9x4d" podStartSLOduration=133.654009991 podStartE2EDuration="2m13.654009991s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:38.64696343 +0000 UTC m=+156.683205342" watchObservedRunningTime="2026-01-22 00:09:38.654009991 +0000 UTC m=+156.690251903" Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.669072 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-cbmj7" event={"ID":"3256e943-667b-4f8d-a732-6a1de0e9d6e4","Type":"ContainerStarted","Data":"1e06b620b55461d88159c1db13a46c5c75e093c3b9fbe0e0e072fdc30ccefe1c"} Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.670276 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29484000-7gpqp" event={"ID":"fb1dda78-6284-441f-9239-1e9f81282032","Type":"ContainerStarted","Data":"4d4b531428b87c3f22ccf9cd2995e8a5e2929dab98f04e652c64f99bf0ad2a81"} Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.670320 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29484000-7gpqp" event={"ID":"fb1dda78-6284-441f-9239-1e9f81282032","Type":"ContainerStarted","Data":"ab0cccce306b7d79aa9c7bc3cb35a31126bf0b62670017f832b941e6130ed35d"} Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.671598 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7vqjh" event={"ID":"38a9a23b-efd4-452d-b4ee-8e5205b59cd5","Type":"ContainerStarted","Data":"fa2514674357600dfe2e1bc0d4120a291937ec08a5cdb93898a69f54ab2b4dec"} Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.672569 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp" event={"ID":"fc463c00-fc43-4cdc-8ff0-a5026aba9539","Type":"ContainerStarted","Data":"c245a3b6afbfec49810a2682a51541ec1be31695ca4b2a88ea188c398236ae80"} Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.672591 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp" event={"ID":"fc463c00-fc43-4cdc-8ff0-a5026aba9539","Type":"ContainerStarted","Data":"2036e912958295c80841153ed3110802fe4724a1c236a4608ddcd33076fa18d9"} Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.673320 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp" Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.684204 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dj69l" 
event={"ID":"ffe3f4a6-a78a-4a5d-bbdd-4bda32cf8d41","Type":"ContainerStarted","Data":"594059a277675a630f50442090c304178b21618eb6824bc87483a28a777c570e"} Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.707182 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-cbmj7" podStartSLOduration=5.707166164 podStartE2EDuration="5.707166164s" podCreationTimestamp="2026-01-22 00:09:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:38.705033817 +0000 UTC m=+156.741275729" watchObservedRunningTime="2026-01-22 00:09:38.707166164 +0000 UTC m=+156.743408076" Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.729426 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dj69l" podStartSLOduration=133.729407634 podStartE2EDuration="2m13.729407634s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:38.728109703 +0000 UTC m=+156.764351615" watchObservedRunningTime="2026-01-22 00:09:38.729407634 +0000 UTC m=+156.765649546" Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.730031 4829 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-rknpp container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.730073 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp" podUID="fc463c00-fc43-4cdc-8ff0-a5026aba9539" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.744817 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:38 crc kubenswrapper[4829]: E0122 00:09:38.745378 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:39.245363387 +0000 UTC m=+157.281605299 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.768226 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp" podStartSLOduration=132.768206846 podStartE2EDuration="2m12.768206846s" podCreationTimestamp="2026-01-22 00:07:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:38.76771439 +0000 UTC m=+156.803956302" watchObservedRunningTime="2026-01-22 00:09:38.768206846 +0000 UTC m=+156.804448758" Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.797804 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-pruner-29484000-7gpqp" podStartSLOduration=133.797778796 podStartE2EDuration="2m13.797778796s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:38.79474731 +0000 UTC m=+156.830989222" watchObservedRunningTime="2026-01-22 00:09:38.797778796 +0000 UTC m=+156.834020708" Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.849415 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:38 crc kubenswrapper[4829]: E0122 00:09:38.851941 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:39.35192666 +0000 UTC m=+157.388168572 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.950184 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:38 crc kubenswrapper[4829]: E0122 00:09:38.950510 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:39.450494271 +0000 UTC m=+157.486736183 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.958626 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-q9d5h"] Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.983761 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-gwk42"] Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.987686 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-dj87x"] Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.991215 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-n7mhk"] Jan 22 00:09:38 crc kubenswrapper[4829]: I0122 00:09:38.995325 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jqdzd"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:38.999741 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-wgx7d"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.004107 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-26xkj"] Jan 22 00:09:39 crc kubenswrapper[4829]: W0122 00:09:39.045325 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod72778ee0_ec95_4ab0_867c_1997b47449f5.slice/crio-b70e03bc5a57cfd2a17450fa88b4a6ceb273f2cdcef19627a6019200dc8c99d0 WatchSource:0}: Error finding container b70e03bc5a57cfd2a17450fa88b4a6ceb273f2cdcef19627a6019200dc8c99d0: Status 404 returned error can't find the container with id 
b70e03bc5a57cfd2a17450fa88b4a6ceb273f2cdcef19627a6019200dc8c99d0 Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.051703 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:39 crc kubenswrapper[4829]: E0122 00:09:39.052048 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:39.552035737 +0000 UTC m=+157.588277639 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.086119 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.088981 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2glr6"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.091271 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-c47sd"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.152359 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:39 crc kubenswrapper[4829]: E0122 00:09:39.152516 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:39.652494908 +0000 UTC m=+157.688736820 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.152719 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:39 crc kubenswrapper[4829]: E0122 00:09:39.153032 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:39.653020735 +0000 UTC m=+157.689262647 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.254469 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:39 crc kubenswrapper[4829]: E0122 00:09:39.256901 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:39.756754919 +0000 UTC m=+157.792996841 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.264556 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-t695d"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.276858 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-6l5hd"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.282445 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-8tq29"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.289681 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-zkmjd"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.293050 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-sg4kz"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.313145 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5j8nc"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.341621 4829 patch_prober.go:28] interesting pod/router-default-5444994796-z9x4d container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 00:09:39 crc kubenswrapper[4829]: [-]has-synced failed: reason withheld Jan 22 00:09:39 crc kubenswrapper[4829]: [+]process-running ok Jan 22 00:09:39 crc kubenswrapper[4829]: healthz check failed Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.341657 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z9x4d" podUID="1cf7eed9-6c3d-4721-bc4d-bc58044750e0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.357441 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:39 crc kubenswrapper[4829]: E0122 00:09:39.357877 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:39.857853411 +0000 UTC m=+157.894095383 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:39 crc kubenswrapper[4829]: W0122 00:09:39.359877 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9447fd29_4eae_4299_b266_1f5236931aee.slice/crio-ff02b4b0e94d3d3ad4ad27211cc104939df13d4074fced94ad80f9db3dfbcd02 WatchSource:0}: Error finding container ff02b4b0e94d3d3ad4ad27211cc104939df13d4074fced94ad80f9db3dfbcd02: Status 404 returned error can't find the container with id ff02b4b0e94d3d3ad4ad27211cc104939df13d4074fced94ad80f9db3dfbcd02 Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.458325 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:39 crc kubenswrapper[4829]: E0122 00:09:39.458888 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:39.958853229 +0000 UTC m=+157.995095131 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.510698 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-ht6jq"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.525382 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-9pmcq"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.529635 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-s5bvx"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.531563 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-w7dnn"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.540720 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-qxrp6"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.566116 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:39 crc kubenswrapper[4829]: E0122 00:09:39.566552 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:40.066516436 +0000 UTC m=+158.102758348 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.580918 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vh68c"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.583256 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vcq2q"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.593073 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6bgqw"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.627291 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-nxg4z"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.643206 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-v546c"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.646897 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-2fdtt"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.668365 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:39 crc kubenswrapper[4829]: E0122 00:09:39.668879 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:40.16862298 +0000 UTC m=+158.204864892 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.675242 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:39 crc kubenswrapper[4829]: E0122 00:09:39.675734 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-01-22 00:09:40.175717533 +0000 UTC m=+158.211959435 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.691502 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-gwk42" event={"ID":"72778ee0-ec95-4ab0-867c-1997b47449f5","Type":"ContainerStarted","Data":"b70e03bc5a57cfd2a17450fa88b4a6ceb273f2cdcef19627a6019200dc8c99d0"} Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.693096 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" event={"ID":"0e60009d-5985-47f9-b164-32cf604c23fa","Type":"ContainerStarted","Data":"c1412b09bfd9577fc6d98be98ea9829c4e95361029ee125457dae15f47eb01e7"} Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.693125 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" event={"ID":"0e60009d-5985-47f9-b164-32cf604c23fa","Type":"ContainerStarted","Data":"f2c12fd83874c399e632dcaaf4d173fae6ad65735bf179df4ce71b632389b1dc"} Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.693477 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.694458 4829 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-q9d5h container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.694492 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" podUID="0e60009d-5985-47f9-b164-32cf604c23fa" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.695939 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-n7mhk" event={"ID":"81acce8c-d724-494b-bdfd-df88546f6ac6","Type":"ContainerStarted","Data":"aeea3d79ce825227e0f525a8347f500c08b62ee1dfa0e6114bb4b939af4d6732"} Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.695963 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-n7mhk" event={"ID":"81acce8c-d724-494b-bdfd-df88546f6ac6","Type":"ContainerStarted","Data":"7ffd2b7c5041cb70fdab94234e397ae4aad957c0b54969e1c9400d9a5c5cc791"} Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.697645 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5j8nc" 
event={"ID":"bd5d245d-600c-4790-89b2-3867c9e6279f","Type":"ContainerStarted","Data":"59b83a36434c2c16f745cbd498121d0399318b43f5b27b0044338768abb407ed"} Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.699210 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2glr6" event={"ID":"2321c1dc-0a76-4b24-aed3-cd67eaf23486","Type":"ContainerStarted","Data":"3e5e07bdbd4181be0be77dbd0de749c6fb3bc26caba00b18ead37f5c142f067a"} Jan 22 00:09:39 crc kubenswrapper[4829]: W0122 00:09:39.712105 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1ecf6e43_b0bb_454a_9165_2e5e44ddba3c.slice/crio-c05d2312bd31b217f0d6a04b9adf58e33280fd779d4b6639c60cf1035bdf4103 WatchSource:0}: Error finding container c05d2312bd31b217f0d6a04b9adf58e33280fd779d4b6639c60cf1035bdf4103: Status 404 returned error can't find the container with id c05d2312bd31b217f0d6a04b9adf58e33280fd779d4b6639c60cf1035bdf4103 Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.712270 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vh68c" event={"ID":"d1166d09-4d81-47db-803f-316f64bac8a7","Type":"ContainerStarted","Data":"050865395ab066bded5e7e16a7fde22ac2b1011778a84e5eebe8764710059e41"} Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.715334 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-9pmcq" event={"ID":"6db38a65-0f46-4da8-8984-f6966500afc4","Type":"ContainerStarted","Data":"ee430b4000c97d068ba2f0ab9640f5bdd381bc7204bfe7e2b0a9affa019a4bdc"} Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.717103 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-6l5hd" event={"ID":"7fa648de-a4e5-4e1d-8bbb-d84d5409f0b2","Type":"ContainerStarted","Data":"f4ee6171af0590d997832f43f0066d7b96a3a64c89ba6aa6f69e7a99e4125ead"} Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.718422 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zkmjd" event={"ID":"dad2b02a-4000-44b8-acad-460b5a394c42","Type":"ContainerStarted","Data":"557651e67aa804bb41ca6c74e436a944b7db5d7ff28102095060ba8d9a6241e4"} Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.719865 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" event={"ID":"2e96a180-475f-4cdc-a89e-e0e61dbcbe53","Type":"ContainerStarted","Data":"2b0e3e00395ebcbb57c2f52669d462e65118d41c346683325ceebdfaa8ccb73c"} Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.736789 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t695d" event={"ID":"684b4808-40a6-495b-a8cf-7d48d54982bb","Type":"ContainerStarted","Data":"f871e1f2e31ffe3f7021f0de925915f870d40cefcf622394c174bcddbde65c32"} Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.738332 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7vqjh" event={"ID":"38a9a23b-efd4-452d-b4ee-8e5205b59cd5","Type":"ContainerStarted","Data":"710826bfd0015f623753dee138a6fdd047b2dc3a1f9b04f883cb753e859db207"} Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.739563 4829 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-s5bvx" event={"ID":"24923fa2-e2a6-408a-a342-6792e1baf637","Type":"ContainerStarted","Data":"447605294f17bdf781a0716d55c7bbdcf8b8af7a51a55244a5f41ef99f6b5f59"} Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.746506 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" podStartSLOduration=134.74648193 podStartE2EDuration="2m14.74648193s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:39.71248767 +0000 UTC m=+157.748729582" watchObservedRunningTime="2026-01-22 00:09:39.74648193 +0000 UTC m=+157.782723842" Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.747393 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484000-n7tmf"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.749780 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-n7mhk" podStartSLOduration=134.749770304 podStartE2EDuration="2m14.749770304s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:39.744465117 +0000 UTC m=+157.780707029" watchObservedRunningTime="2026-01-22 00:09:39.749770304 +0000 UTC m=+157.786012216" Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.757213 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cvqvz"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.771199 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jqdzd" event={"ID":"2ef479a2-183c-40f8-8bd7-e974de5a5305","Type":"ContainerStarted","Data":"5ce3fc6bd2b9d7a2160b610b1cfd4433f13f9f032c73bf29bb032862690f8acf"} Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.771239 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jqdzd" event={"ID":"2ef479a2-183c-40f8-8bd7-e974de5a5305","Type":"ContainerStarted","Data":"8e843ebdf44fd8ba8f3aac54d23885eba505506b00ba49982c9d272c29c7a7b3"} Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.779467 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:39 crc kubenswrapper[4829]: E0122 00:09:39.779794 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:40.279779588 +0000 UTC m=+158.316021500 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.779925 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sg4kz" event={"ID":"f5d785d5-3f71-495f-99f0-fd89b646aec9","Type":"ContainerStarted","Data":"5b04d96846520ceec7d0e93cadb6949385ec5b6762db03075f932157552ce4f6"} Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.780328 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7vqjh" podStartSLOduration=134.780310745 podStartE2EDuration="2m14.780310745s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:39.779004453 +0000 UTC m=+157.815246355" watchObservedRunningTime="2026-01-22 00:09:39.780310745 +0000 UTC m=+157.816552657" Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.787662 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-q9s2c"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.808112 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-w7dnn" event={"ID":"2dcde760-e466-49aa-a092-1385876196ef","Type":"ContainerStarted","Data":"439d5dc1d516cdde09c349d7ad153ba4049fda1218d57d75bbae72bd66dc0f41"} Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.817344 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-wgx7d" event={"ID":"d2b61594-3e3c-4445-9308-436bd10952c0","Type":"ContainerStarted","Data":"2ee8fc96cb69f54bb075a26d00fb6fe24cfb80025dd60c22a59d8cc40b1cc2db"} Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.817387 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-wgx7d" event={"ID":"d2b61594-3e3c-4445-9308-436bd10952c0","Type":"ContainerStarted","Data":"6c8a74b2c38f182d6cffab168c8133582e9e9ed04535a8de5d9c4a6f60faafc1"} Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.819252 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-8tq29" event={"ID":"9447fd29-4eae-4299-b266-1f5236931aee","Type":"ContainerStarted","Data":"ff02b4b0e94d3d3ad4ad27211cc104939df13d4074fced94ad80f9db3dfbcd02"} Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.820842 4829 generic.go:334] "Generic (PLEG): container finished" podID="7f619c30-40fb-46a4-956e-366f2192703e" containerID="a627c5ca812be450f0e872cb87e3e621516509faa2741a0e5221a83b8ec77865" exitCode=0 Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.820902 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-c47sd" event={"ID":"7f619c30-40fb-46a4-956e-366f2192703e","Type":"ContainerDied","Data":"a627c5ca812be450f0e872cb87e3e621516509faa2741a0e5221a83b8ec77865"} Jan 22 00:09:39 crc 
kubenswrapper[4829]: I0122 00:09:39.821156 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-c47sd" event={"ID":"7f619c30-40fb-46a4-956e-366f2192703e","Type":"ContainerStarted","Data":"d64ecf53bc45567e9873eafa5e6cd601def6abebe7223e346aeaa34851d68b56"} Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.822196 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-ht6jq" event={"ID":"4e86e71a-0c33-41b1-854d-0ba4d03af4e6","Type":"ContainerStarted","Data":"16a10f3d493c9c4d09512abe3c592bb7f32a4c04c199d0f41f40538bd81fe107"} Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.823231 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" event={"ID":"de89451c-cbc7-401d-ab19-f4ea8916fcb5","Type":"ContainerStarted","Data":"db2cae3d180e6e929f48e6521b1938269f6b06a47fdc638f91195c2251bbb82c"} Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.823921 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.827123 4829 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-26xkj container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.35:6443/healthz\": dial tcp 10.217.0.35:6443: connect: connection refused" start-of-body= Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.827191 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" podUID="de89451c-cbc7-401d-ab19-f4ea8916fcb5" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.35:6443/healthz\": dial tcp 10.217.0.35:6443: connect: connection refused" Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.839026 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-q2gtb"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.841331 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-dj87x" event={"ID":"2594295b-8073-45b0-8006-f7276c580e6d","Type":"ContainerStarted","Data":"f4f8956ea10db0b729eac304d564d9411f0f64b739b6faec236746d59f7c91e1"} Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.841375 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-dj87x" event={"ID":"2594295b-8073-45b0-8006-f7276c580e6d","Type":"ContainerStarted","Data":"23e1d3846dfe2c6caf4307d90652f279468ebd60a820ad30a52b659595a8e862"} Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.841693 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-dj87x" Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.846689 4829 patch_prober.go:28] interesting pod/console-operator-58897d9998-dj87x container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/readyz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.846738 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-dj87x" podUID="2594295b-8073-45b0-8006-f7276c580e6d" 
containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.6:8443/readyz\": dial tcp 10.217.0.6:8443: connect: connection refused" Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.851197 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-wgx7d" podStartSLOduration=134.851178035 podStartE2EDuration="2m14.851178035s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:39.840306913 +0000 UTC m=+157.876548845" watchObservedRunningTime="2026-01-22 00:09:39.851178035 +0000 UTC m=+157.887419947" Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.852522 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vdnc9"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.868684 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-qxrp6" event={"ID":"f0f51904-83ab-43d1-87ef-8d7efbe1a6c8","Type":"ContainerStarted","Data":"0af835115bfa1055bfcbb6e8adeddfc3d04901583a4b0411ad86ad5dcbdb8a9f"} Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.876838 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp" Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.882671 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" podStartSLOduration=134.882645515 podStartE2EDuration="2m14.882645515s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:39.873154667 +0000 UTC m=+157.909396579" watchObservedRunningTime="2026-01-22 00:09:39.882645515 +0000 UTC m=+157.918887427" Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.882866 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.884106 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2g8pk"] Jan 22 00:09:39 crc kubenswrapper[4829]: E0122 00:09:39.884232 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:40.384169663 +0000 UTC m=+158.420411665 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.895559 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-nld4t"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.913030 4829 csr.go:261] certificate signing request csr-sbc2c is approved, waiting to be issued Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.915369 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kzt7t"] Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.928642 4829 csr.go:257] certificate signing request csr-sbc2c is issued Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.946116 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-dj87x" podStartSLOduration=134.946101532 podStartE2EDuration="2m14.946101532s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:39.944527362 +0000 UTC m=+157.980769284" watchObservedRunningTime="2026-01-22 00:09:39.946101532 +0000 UTC m=+157.982343444" Jan 22 00:09:39 crc kubenswrapper[4829]: I0122 00:09:39.951992 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gfvpq"] Jan 22 00:09:40 crc kubenswrapper[4829]: I0122 00:09:39.987060 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:40 crc kubenswrapper[4829]: E0122 00:09:39.991504 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:40.4914802 +0000 UTC m=+158.527722112 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:40 crc kubenswrapper[4829]: W0122 00:09:40.094684 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5354ee45_800d_4a08_be4e_64c5ae44811e.slice/crio-10b2e473f3907448652a737921922cc0453a1dc404b00417a7aefbe0d2ae5311 WatchSource:0}: Error finding container 10b2e473f3907448652a737921922cc0453a1dc404b00417a7aefbe0d2ae5311: Status 404 returned error can't find the container with id 10b2e473f3907448652a737921922cc0453a1dc404b00417a7aefbe0d2ae5311 Jan 22 00:09:40 crc kubenswrapper[4829]: I0122 00:09:40.095285 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:40 crc kubenswrapper[4829]: E0122 00:09:40.095661 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:40.595648998 +0000 UTC m=+158.631890910 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:40 crc kubenswrapper[4829]: I0122 00:09:40.196379 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:40 crc kubenswrapper[4829]: E0122 00:09:40.197141 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:40.697123051 +0000 UTC m=+158.733364963 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:40 crc kubenswrapper[4829]: I0122 00:09:40.299334 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:40 crc kubenswrapper[4829]: E0122 00:09:40.299715 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:40.799696228 +0000 UTC m=+158.835938210 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:40 crc kubenswrapper[4829]: I0122 00:09:40.341777 4829 patch_prober.go:28] interesting pod/router-default-5444994796-z9x4d container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 00:09:40 crc kubenswrapper[4829]: [-]has-synced failed: reason withheld Jan 22 00:09:40 crc kubenswrapper[4829]: [+]process-running ok Jan 22 00:09:40 crc kubenswrapper[4829]: healthz check failed Jan 22 00:09:40 crc kubenswrapper[4829]: I0122 00:09:40.341847 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z9x4d" podUID="1cf7eed9-6c3d-4721-bc4d-bc58044750e0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 00:09:40 crc kubenswrapper[4829]: I0122 00:09:40.400421 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:40 crc kubenswrapper[4829]: E0122 00:09:40.400658 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:40.900631725 +0000 UTC m=+158.936873627 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:40 crc kubenswrapper[4829]: I0122 00:09:40.401810 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:40 crc kubenswrapper[4829]: E0122 00:09:40.402146 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:40.902134793 +0000 UTC m=+158.938376705 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:40 crc kubenswrapper[4829]: I0122 00:09:40.503982 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:40 crc kubenswrapper[4829]: E0122 00:09:40.504386 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:41.004358989 +0000 UTC m=+159.040600911 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:40 crc kubenswrapper[4829]: I0122 00:09:40.504501 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:40 crc kubenswrapper[4829]: E0122 00:09:40.504783 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:41.004771282 +0000 UTC m=+159.041013194 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:40 crc kubenswrapper[4829]: I0122 00:09:40.625526 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:40 crc kubenswrapper[4829]: E0122 00:09:40.626069 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:41.126050189 +0000 UTC m=+159.162292101 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:40 crc kubenswrapper[4829]: I0122 00:09:40.727132 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:40 crc kubenswrapper[4829]: E0122 00:09:40.727573 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:41.227529393 +0000 UTC m=+159.263771305 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:40 crc kubenswrapper[4829]: I0122 00:09:40.830911 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:40 crc kubenswrapper[4829]: E0122 00:09:40.835266 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:41.335239222 +0000 UTC m=+159.371481144 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:40 crc kubenswrapper[4829]: I0122 00:09:40.835732 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:40 crc kubenswrapper[4829]: E0122 00:09:40.836233 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:41.336222643 +0000 UTC m=+159.372464555 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:40 crc kubenswrapper[4829]: I0122 00:09:40.913935 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-9pmcq" event={"ID":"6db38a65-0f46-4da8-8984-f6966500afc4","Type":"ContainerStarted","Data":"ed717a383d5ce48899a97def86648aafabb1722f6a4d107984e50bdbd1e0dcde"} Jan 22 00:09:40 crc kubenswrapper[4829]: I0122 00:09:40.919530 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-6l5hd" event={"ID":"7fa648de-a4e5-4e1d-8bbb-d84d5409f0b2","Type":"ContainerStarted","Data":"2aadee828d097b82591ce64814addaa5b81cba6cc37e139b1afc628f65f1aa64"} Jan 22 00:09:40 crc kubenswrapper[4829]: I0122 00:09:40.929612 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-01-22 00:04:39 +0000 UTC, rotation deadline is 2026-10-23 18:23:57.954346208 +0000 UTC Jan 22 00:09:40 crc kubenswrapper[4829]: I0122 00:09:40.929635 4829 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 6594h14m17.024713296s for next certificate rotation Jan 22 00:09:40 crc kubenswrapper[4829]: I0122 00:09:40.936555 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:40 crc kubenswrapper[4829]: I0122 00:09:40.936915 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-9pmcq" podStartSLOduration=7.936887231 podStartE2EDuration="7.936887231s" podCreationTimestamp="2026-01-22 00:09:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 
+0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:40.934239007 +0000 UTC m=+158.970480919" watchObservedRunningTime="2026-01-22 00:09:40.936887231 +0000 UTC m=+158.973129143" Jan 22 00:09:40 crc kubenswrapper[4829]: E0122 00:09:40.937468 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:41.437448089 +0000 UTC m=+159.473690001 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:40 crc kubenswrapper[4829]: I0122 00:09:40.939752 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5j8nc" event={"ID":"bd5d245d-600c-4790-89b2-3867c9e6279f","Type":"ContainerStarted","Data":"d9df75724aa57316a0ac1a80c8fa9fd83f49bc07b3f8c045ab37f6a6f7567e17"} Jan 22 00:09:40 crc kubenswrapper[4829]: I0122 00:09:40.941272 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5j8nc" Jan 22 00:09:40 crc kubenswrapper[4829]: I0122 00:09:40.941421 4829 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-5j8nc container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.24:8443/healthz\": dial tcp 10.217.0.24:8443: connect: connection refused" start-of-body= Jan 22 00:09:40 crc kubenswrapper[4829]: I0122 00:09:40.941523 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5j8nc" podUID="bd5d245d-600c-4790-89b2-3867c9e6279f" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.24:8443/healthz\": dial tcp 10.217.0.24:8443: connect: connection refused" Jan 22 00:09:40 crc kubenswrapper[4829]: I0122 00:09:40.944854 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-q9s2c" event={"ID":"8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b","Type":"ContainerStarted","Data":"440575b35d1148ddc39329388d4e3edd909825276be48ed31d64a377f98b81fe"} Jan 22 00:09:40 crc kubenswrapper[4829]: I0122 00:09:40.948484 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6bgqw" event={"ID":"1ecf6e43-b0bb-454a-9165-2e5e44ddba3c","Type":"ContainerStarted","Data":"c05d2312bd31b217f0d6a04b9adf58e33280fd779d4b6639c60cf1035bdf4103"} Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.017858 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484000-n7tmf" event={"ID":"c91fb8a4-d4a3-4782-94f2-3d6eb06221d9","Type":"ContainerStarted","Data":"4910b9ed18b39cba484c33a568e1f074f944fed94d29b47f77ec62daf3a09939"} Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.030570 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/marketplace-operator-79b997595-w7dnn" event={"ID":"2dcde760-e466-49aa-a092-1385876196ef","Type":"ContainerStarted","Data":"9375d371ad43f857534c32794260bdb25ee1236731ac7890ac1f7f40c3adacb4"} Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.031597 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-w7dnn" Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.036427 4829 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-w7dnn container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body= Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.036474 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-w7dnn" podUID="2dcde760-e466-49aa-a092-1385876196ef" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.038347 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:41 crc kubenswrapper[4829]: E0122 00:09:41.038714 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:41.538702735 +0000 UTC m=+159.574944647 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.038801 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vcq2q" event={"ID":"90f39ea3-632a-4845-b3b9-6ed24a762baa","Type":"ContainerStarted","Data":"492bb4ebd8b198b04953fa0b3437917fa024672c3efe9bb84188dc35a6f1f0f9"} Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.075263 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29484000-n7tmf" podStartSLOduration=136.075246054 podStartE2EDuration="2m16.075246054s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:41.074612845 +0000 UTC m=+159.110854747" watchObservedRunningTime="2026-01-22 00:09:41.075246054 +0000 UTC m=+159.111487966" Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.076990 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5j8nc" podStartSLOduration=135.076978889 podStartE2EDuration="2m15.076978889s" podCreationTimestamp="2026-01-22 00:07:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:40.98134217 +0000 UTC m=+159.017584082" watchObservedRunningTime="2026-01-22 00:09:41.076978889 +0000 UTC m=+159.113220791" Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.100447 4829 generic.go:334] "Generic (PLEG): container finished" podID="72778ee0-ec95-4ab0-867c-1997b47449f5" containerID="73a9527981406596f8c6118c6474bc2a2f19be542864c793f7ea8786f4c08c32" exitCode=0 Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.100530 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-gwk42" event={"ID":"72778ee0-ec95-4ab0-867c-1997b47449f5","Type":"ContainerDied","Data":"73a9527981406596f8c6118c6474bc2a2f19be542864c793f7ea8786f4c08c32"} Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.114097 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-w7dnn" podStartSLOduration=135.114079497 podStartE2EDuration="2m15.114079497s" podCreationTimestamp="2026-01-22 00:07:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:41.107389396 +0000 UTC m=+159.143631328" watchObservedRunningTime="2026-01-22 00:09:41.114079497 +0000 UTC m=+159.150321409" Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.139391 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:41 crc kubenswrapper[4829]: E0122 00:09:41.140350 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:41.640336163 +0000 UTC m=+159.676578075 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.196647 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gfvpq" event={"ID":"db154a27-c1c5-498a-8184-7264c954bb47","Type":"ContainerStarted","Data":"155ddbee34b736bee52848662851357967fccfc6ca8c501df5b68acfd4f2602e"} Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.204563 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kzt7t" event={"ID":"bae421e2-2b7d-465e-baeb-9cdca1f68dc3","Type":"ContainerStarted","Data":"797fc9e4c8bd8053577a8b1e818087233d9b3578449930ea1c5c94b41399a6b8"} Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.213348 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-s5bvx" event={"ID":"24923fa2-e2a6-408a-a342-6792e1baf637","Type":"ContainerStarted","Data":"a9f9aced7240b25c140744a661f3203e7fd2db4f1e2c22fb2bb2d85b7427e76a"} Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.245446 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:41 crc kubenswrapper[4829]: E0122 00:09:41.246801 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:41.746789533 +0000 UTC m=+159.783031445 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.363662 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:41 crc kubenswrapper[4829]: E0122 00:09:41.364705 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:41.864688532 +0000 UTC m=+159.900930434 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.370033 4829 patch_prober.go:28] interesting pod/router-default-5444994796-z9x4d container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 00:09:41 crc kubenswrapper[4829]: [-]has-synced failed: reason withheld Jan 22 00:09:41 crc kubenswrapper[4829]: [+]process-running ok Jan 22 00:09:41 crc kubenswrapper[4829]: healthz check failed Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.370067 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z9x4d" podUID="1cf7eed9-6c3d-4721-bc4d-bc58044750e0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.392928 4829 generic.go:334] "Generic (PLEG): container finished" podID="2e96a180-475f-4cdc-a89e-e0e61dbcbe53" containerID="12152d024cd4bdff823cbc2f48df4d3e4d2a823faead6aa8eeaf8169733acc99" exitCode=0 Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.393344 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" event={"ID":"2e96a180-475f-4cdc-a89e-e0e61dbcbe53","Type":"ContainerDied","Data":"12152d024cd4bdff823cbc2f48df4d3e4d2a823faead6aa8eeaf8169733acc99"} Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.405127 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2g8pk" event={"ID":"5354ee45-800d-4a08-be4e-64c5ae44811e","Type":"ContainerStarted","Data":"10b2e473f3907448652a737921922cc0453a1dc404b00417a7aefbe0d2ae5311"} Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.427645 
4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-s5bvx" podStartSLOduration=135.427626313 podStartE2EDuration="2m15.427626313s" podCreationTimestamp="2026-01-22 00:07:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:41.283214719 +0000 UTC m=+159.319456631" watchObservedRunningTime="2026-01-22 00:09:41.427626313 +0000 UTC m=+159.463868225" Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.466931 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:41 crc kubenswrapper[4829]: E0122 00:09:41.467325 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:41.967311422 +0000 UTC m=+160.003553334 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.511248 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jqdzd" event={"ID":"2ef479a2-183c-40f8-8bd7-e974de5a5305","Type":"ContainerStarted","Data":"2df684e5e976014807eb99b3e65bd3c9cdee3e4e01a09367a63b03da606e3276"} Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.547737 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nld4t" event={"ID":"3127e466-6f03-48c9-8d8b-6de53678192c","Type":"ContainerStarted","Data":"dc31604f250b6b77b901606074e4e6275d5c2404758898f7c7a7739c26756d8c"} Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.547970 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-jqdzd" podStartSLOduration=136.54795463 podStartE2EDuration="2m16.54795463s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:41.547114334 +0000 UTC m=+159.583356246" watchObservedRunningTime="2026-01-22 00:09:41.54795463 +0000 UTC m=+159.584196532" Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.566866 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-nxg4z" event={"ID":"3916e963-6bce-4316-b02e-98b5565e8615","Type":"ContainerStarted","Data":"d7dbe703446edec0cda4e5e94d6c9ea8754582bb4336bba69227b6f9806f07f1"} Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.577233 4829 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:41 crc kubenswrapper[4829]: E0122 00:09:41.578233 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:42.078213002 +0000 UTC m=+160.114454914 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.586246 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2glr6" event={"ID":"2321c1dc-0a76-4b24-aed3-cd67eaf23486","Type":"ContainerStarted","Data":"b99d2ca4e40f48cf7ce593f12fed7f8248426806d2d3c6b9f96b34dea8f3f414"} Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.603734 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t695d" event={"ID":"684b4808-40a6-495b-a8cf-7d48d54982bb","Type":"ContainerStarted","Data":"10e975f1238a47feefa6867047bff0ae3b2ecee280c6920102c131f453025a0a"} Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.615133 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-8tq29" event={"ID":"9447fd29-4eae-4299-b266-1f5236931aee","Type":"ContainerStarted","Data":"ce130acb05813e2d5eacfa87edff5955bab74a59d5a2339f9a7b9ffc776c70ec"} Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.616025 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-8tq29" Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.618384 4829 patch_prober.go:28] interesting pod/downloads-7954f5f757-8tq29 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.618427 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-8tq29" podUID="9447fd29-4eae-4299-b266-1f5236931aee" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.624379 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2glr6" podStartSLOduration=136.624365224 podStartE2EDuration="2m16.624365224s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-01-22 00:09:41.623523728 +0000 UTC m=+159.659765650" watchObservedRunningTime="2026-01-22 00:09:41.624365224 +0000 UTC m=+159.660607136" Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.624653 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sg4kz" event={"ID":"f5d785d5-3f71-495f-99f0-fd89b646aec9","Type":"ContainerStarted","Data":"3cd8a9c0d154d10cd2c47b02c66870a80a243eac0154b002f4e786c431d85b09"} Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.644409 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t695d" podStartSLOduration=135.644391085 podStartE2EDuration="2m15.644391085s" podCreationTimestamp="2026-01-22 00:07:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:41.643185057 +0000 UTC m=+159.679426989" watchObservedRunningTime="2026-01-22 00:09:41.644391085 +0000 UTC m=+159.680632997" Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.662678 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-ht6jq" event={"ID":"4e86e71a-0c33-41b1-854d-0ba4d03af4e6","Type":"ContainerStarted","Data":"3ec5ed6314df1ea6bf5fb6397c08f8ec3e1eb30dc1c24ccc403e5cfa5cba4af8"} Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.678638 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:41 crc kubenswrapper[4829]: E0122 00:09:41.683949 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:42.183931209 +0000 UTC m=+160.220173191 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.714279 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zkmjd" event={"ID":"dad2b02a-4000-44b8-acad-460b5a394c42","Type":"ContainerStarted","Data":"1abe9475ee462e1197f768634d8ad3c7107b03b21a95bac596c68226dd803fd4"} Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.730225 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vdnc9" event={"ID":"562761b1-7f83-4d7a-b933-952fd9ad9963","Type":"ContainerStarted","Data":"b225c60cc4cdf92212247830a3ff3ffc1efcd09b4f69ab2e95d7b9a069d31b88"} Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.745944 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sg4kz" podStartSLOduration=136.74592696 podStartE2EDuration="2m16.74592696s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:41.740964364 +0000 UTC m=+159.777206276" watchObservedRunningTime="2026-01-22 00:09:41.74592696 +0000 UTC m=+159.782168872" Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.755976 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cvqvz" event={"ID":"db0366e2-9560-4f1a-949c-66d4ddc09b89","Type":"ContainerStarted","Data":"7341342a64256bb0bd668c19babb29ec0bd2970655ffe15357bb3ed2f77c07e8"} Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.757321 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-8tq29" podStartSLOduration=136.757298388 podStartE2EDuration="2m16.757298388s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:41.714239553 +0000 UTC m=+159.750481465" watchObservedRunningTime="2026-01-22 00:09:41.757298388 +0000 UTC m=+159.793540340" Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.786908 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-v546c" event={"ID":"52038e8d-9971-49b4-86fb-8062c0eea326","Type":"ContainerStarted","Data":"c3765eddcb940ff318a0d3e065f1cfcf63dbf3466c84e6068fd0470cdeb7765f"} Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.787451 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:41 crc kubenswrapper[4829]: E0122 00:09:41.787993 4829 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:42.287973453 +0000 UTC m=+160.324215365 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.812794 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" event={"ID":"de89451c-cbc7-401d-ab19-f4ea8916fcb5","Type":"ContainerStarted","Data":"c1d68b3f40128e6b7f03d34e5a0acee24319e669352f36ccf10537736987d6a0"} Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.822895 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-q2gtb" event={"ID":"a660b566-870f-40b9-a790-6e6f2baa1e90","Type":"ContainerStarted","Data":"5440cc9665f8d379975f011c6df27963534e366c343404fe77f160d660e2c583"} Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.826141 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-2fdtt" event={"ID":"a6356f84-ed83-468e-b825-808d0aa2c7d4","Type":"ContainerStarted","Data":"3cf4f9d9e7a2f8ee4b00e9df7067488a7c4d873069860db9ab4346a9f1188ab1"} Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.831557 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-dj87x" Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.835496 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zkmjd" podStartSLOduration=135.835485538 podStartE2EDuration="2m15.835485538s" podCreationTimestamp="2026-01-22 00:07:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:41.801894131 +0000 UTC m=+159.838136053" watchObservedRunningTime="2026-01-22 00:09:41.835485538 +0000 UTC m=+159.871727450" Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.862735 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.892421 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:41 crc kubenswrapper[4829]: E0122 00:09:41.897048 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:42.397030785 +0000 UTC m=+160.433272697 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:41 crc kubenswrapper[4829]: I0122 00:09:41.963176 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cvqvz" podStartSLOduration=135.963157045 podStartE2EDuration="2m15.963157045s" podCreationTimestamp="2026-01-22 00:07:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:41.852452002 +0000 UTC m=+159.888693914" watchObservedRunningTime="2026-01-22 00:09:41.963157045 +0000 UTC m=+159.999398957" Jan 22 00:09:42 crc kubenswrapper[4829]: I0122 00:09:42.037004 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:42 crc kubenswrapper[4829]: E0122 00:09:42.037305 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:42.537290529 +0000 UTC m=+160.573532441 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:42 crc kubenswrapper[4829]: I0122 00:09:42.078450 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-2fdtt" podStartSLOduration=137.078433633 podStartE2EDuration="2m17.078433633s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:41.971948212 +0000 UTC m=+160.008190124" watchObservedRunningTime="2026-01-22 00:09:42.078433633 +0000 UTC m=+160.114675545" Jan 22 00:09:42 crc kubenswrapper[4829]: I0122 00:09:42.139920 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:42 crc kubenswrapper[4829]: E0122 00:09:42.140296 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:42.6402833 +0000 UTC m=+160.676525212 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:42 crc kubenswrapper[4829]: I0122 00:09:42.240497 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:42 crc kubenswrapper[4829]: E0122 00:09:42.241041 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:42.74102541 +0000 UTC m=+160.777267322 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:42 crc kubenswrapper[4829]: I0122 00:09:42.344328 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:42 crc kubenswrapper[4829]: E0122 00:09:42.344657 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:42.844646321 +0000 UTC m=+160.880888233 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:42 crc kubenswrapper[4829]: I0122 00:09:42.359744 4829 patch_prober.go:28] interesting pod/router-default-5444994796-z9x4d container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 00:09:42 crc kubenswrapper[4829]: [-]has-synced failed: reason withheld Jan 22 00:09:42 crc kubenswrapper[4829]: [+]process-running ok Jan 22 00:09:42 crc kubenswrapper[4829]: healthz check failed Jan 22 00:09:42 crc kubenswrapper[4829]: I0122 00:09:42.359802 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z9x4d" podUID="1cf7eed9-6c3d-4721-bc4d-bc58044750e0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 00:09:42 crc kubenswrapper[4829]: I0122 00:09:42.445092 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:42 crc kubenswrapper[4829]: E0122 00:09:42.445416 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:42.945400251 +0000 UTC m=+160.981642163 (durationBeforeRetry 500ms). 
Jan 22 00:09:42 crc kubenswrapper[4829]: I0122 00:09:42.547193 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9"
Jan 22 00:09:42 crc kubenswrapper[4829]: E0122 00:09:42.547555 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:43.047528856 +0000 UTC m=+161.083770768 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 00:09:42 crc kubenswrapper[4829]: I0122 00:09:42.648802 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 22 00:09:42 crc kubenswrapper[4829]: E0122 00:09:42.649096 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:43.149080411 +0000 UTC m=+161.185322313 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 00:09:42 crc kubenswrapper[4829]: I0122 00:09:42.783806 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9"
Jan 22 00:09:42 crc kubenswrapper[4829]: E0122 00:09:42.784319 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:43.284300966 +0000 UTC m=+161.320542898 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 00:09:42 crc kubenswrapper[4829]: I0122 00:09:42.813738 4829 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-26xkj container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.35:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Jan 22 00:09:42 crc kubenswrapper[4829]: I0122 00:09:42.813853 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" podUID="de89451c-cbc7-401d-ab19-f4ea8916fcb5" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.35:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Jan 22 00:09:42 crc kubenswrapper[4829]: I0122 00:09:42.871835 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-q2gtb" event={"ID":"a660b566-870f-40b9-a790-6e6f2baa1e90","Type":"ContainerStarted","Data":"0824a02947027fee2cbd5d059929669533aaf965c94a5d0639675d7edea64c74"}
Jan 22 00:09:42 crc kubenswrapper[4829]: I0122 00:09:42.884523 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 22 00:09:42 crc kubenswrapper[4829]: E0122 00:09:42.884828 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:43.384814099 +0000 UTC m=+161.421056011 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 00:09:42 crc kubenswrapper[4829]: I0122 00:09:42.885014 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-2fdtt" event={"ID":"a6356f84-ed83-468e-b825-808d0aa2c7d4","Type":"ContainerStarted","Data":"3376f8d769acfd1525f5f899baefb99a60ad14007e0e8d1cbf74d9a015787ffc"}
Jan 22 00:09:42 crc kubenswrapper[4829]: I0122 00:09:42.951134 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-t695d" event={"ID":"684b4808-40a6-495b-a8cf-7d48d54982bb","Type":"ContainerStarted","Data":"f38ccb1a99e46617da665d1fd6b46db2b6f5573d658741e4d1b3531adebd3515"}
Jan 22 00:09:42 crc kubenswrapper[4829]: I0122 00:09:42.957790 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gfvpq" event={"ID":"db154a27-c1c5-498a-8184-7264c954bb47","Type":"ContainerStarted","Data":"65b4f0a06f6f043aafe8b777a5ce82c746f316c38d279e6d59e5b8344c72b709"}
Jan 22 00:09:42 crc kubenswrapper[4829]: I0122 00:09:42.958229 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gfvpq"
Jan 22 00:09:42 crc kubenswrapper[4829]: I0122 00:09:42.960884 4829 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-gfvpq container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.40:8443/healthz\": dial tcp 10.217.0.40:8443: connect: connection refused" start-of-body=
Jan 22 00:09:42 crc kubenswrapper[4829]: I0122 00:09:42.960916 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gfvpq" podUID="db154a27-c1c5-498a-8184-7264c954bb47" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.40:8443/healthz\": dial tcp 10.217.0.40:8443: connect: connection refused"
Jan 22 00:09:42 crc kubenswrapper[4829]: I0122 00:09:42.983957 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-qxrp6" event={"ID":"f0f51904-83ab-43d1-87ef-8d7efbe1a6c8","Type":"ContainerStarted","Data":"56f7a6add26b0068a1b5178c21cbd6f09a66ecced15b8a3c93c02dd8ed720938"}
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:42.991561 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9"
Jan 22 00:09:43 crc kubenswrapper[4829]: E0122 00:09:42.992637 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:43.492625302 +0000 UTC m=+161.528867214 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:42.994578 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2g8pk" event={"ID":"5354ee45-800d-4a08-be4e-64c5ae44811e","Type":"ContainerStarted","Data":"d4cc4751137f6e178df6f1264bfff853598d621919577e39c101817fbf17c2aa"}
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.093179 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 22 00:09:43 crc kubenswrapper[4829]: E0122 00:09:43.094923 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:43.59489862 +0000 UTC m=+161.631140532 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.166997 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-zkmjd" event={"ID":"dad2b02a-4000-44b8-acad-460b5a394c42","Type":"ContainerStarted","Data":"47a45d7d135f0e0b3eb8941cd88d3b3468bb41a8aa619836201187df82af01c4"}
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.188395 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vdnc9" event={"ID":"562761b1-7f83-4d7a-b933-952fd9ad9963","Type":"ContainerStarted","Data":"c8f8c94f66556c5fc728782b7198e43ad96fd294bbbad457e4088d8500d16d64"}
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.188447 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vdnc9" event={"ID":"562761b1-7f83-4d7a-b933-952fd9ad9963","Type":"ContainerStarted","Data":"9fb49d6efb16ba378c153d01a2edf5ad8035651d0eb88c77709ee7f650fe6533"}
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.188798 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vdnc9"
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.205985 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9"
Jan 22 00:09:43 crc kubenswrapper[4829]: E0122 00:09:43.206444 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:43.70642086 +0000 UTC m=+161.742662782 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.263584 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cvqvz" event={"ID":"db0366e2-9560-4f1a-949c-66d4ddc09b89","Type":"ContainerStarted","Data":"58f9ccd5adbea5ef6e1a86373be91e113e70fa1b6de6bc93a9aebce65098ad0c"}
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.278113 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-v546c" event={"ID":"52038e8d-9971-49b4-86fb-8062c0eea326","Type":"ContainerStarted","Data":"31636f2b4dfddfffe711fa1d24dc94d324169ab8a13b60caf6441d22f154e456"}
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.295357 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nld4t" event={"ID":"3127e466-6f03-48c9-8d8b-6de53678192c","Type":"ContainerStarted","Data":"1f22f5a68108d34cd08fc21d5039716cdab9e6ef507ba965c39ee0daef766841"}
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.301221 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-q9s2c" event={"ID":"8c53a79b-fc2c-48ec-8aa5-14cd282b8e6b","Type":"ContainerStarted","Data":"d9e995cf52c8d506a25e9fae08266161da1df455f7f2e1b49bda028c1e932c47"}
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.304076 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-c47sd" event={"ID":"7f619c30-40fb-46a4-956e-366f2192703e","Type":"ContainerStarted","Data":"535217eff7f987e988b2d7a9aa3e9d70f249c27ae7ad9afa25329a0bb5151e66"}
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.304620 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-c47sd"
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.313088 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 22 00:09:43 crc kubenswrapper[4829]: E0122 00:09:43.313441 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:43.813402666 +0000 UTC m=+161.849644578 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.313981 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9"
Jan 22 00:09:43 crc kubenswrapper[4829]: E0122 00:09:43.315748 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:43.815728609 +0000 UTC m=+161.851970581 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.329152 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vh68c" event={"ID":"d1166d09-4d81-47db-803f-316f64bac8a7","Type":"ContainerStarted","Data":"31046c82da7bd434e8868a4f17de019cd541381a1194f9682c923e9de5b86ee7"}
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.335134 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vcq2q" event={"ID":"90f39ea3-632a-4845-b3b9-6ed24a762baa","Type":"ContainerStarted","Data":"7cc25a98996deaea00bc68d02a15b76161215732bde5d05eb947cba92921bdc8"}
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.342608 4829 patch_prober.go:28] interesting pod/router-default-5444994796-z9x4d container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 22 00:09:43 crc kubenswrapper[4829]: [-]has-synced failed: reason withheld
Jan 22 00:09:43 crc kubenswrapper[4829]: [+]process-running ok
Jan 22 00:09:43 crc kubenswrapper[4829]: healthz check failed
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.342689 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z9x4d" podUID="1cf7eed9-6c3d-4721-bc4d-bc58044750e0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
output="HTTP probe failed with statuscode: 500" Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.365519 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-sg4kz" event={"ID":"f5d785d5-3f71-495f-99f0-fd89b646aec9","Type":"ContainerStarted","Data":"156cf725fb689b6ef41046adc107dcca790337d22d3128a0dd0f610aeff1b13e"} Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.382602 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kzt7t" event={"ID":"bae421e2-2b7d-465e-baeb-9cdca1f68dc3","Type":"ContainerStarted","Data":"3947e799b9ff7dbeea5fbbd2ae61708eda16c811f6edb1a6130b04dcfe2524eb"} Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.384126 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kzt7t" Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.412674 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6bgqw" event={"ID":"1ecf6e43-b0bb-454a-9165-2e5e44ddba3c","Type":"ContainerStarted","Data":"928604600be3c60e57e44f4a3e34fd9e89d7ad98d65ee35ca521da0e087fa47b"} Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.415439 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:43 crc kubenswrapper[4829]: E0122 00:09:43.416921 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:43.916906293 +0000 UTC m=+161.953148205 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.452217 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-tw5xj"] Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.453166 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tw5xj"] Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.453261 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-tw5xj" Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.465026 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.474399 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-nxg4z" event={"ID":"3916e963-6bce-4316-b02e-98b5565e8615","Type":"ContainerStarted","Data":"31c52466c2dee9b1f845d15d42ec1d058e203a99a3ef2e16fb4c13e191ed62fb"} Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.474450 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-nxg4z" event={"ID":"3916e963-6bce-4316-b02e-98b5565e8615","Type":"ContainerStarted","Data":"951af64dc2cd43843059491ffab05ee62163fafc5050d807cabda2225e3e7c2c"} Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.479845 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484000-n7tmf" event={"ID":"c91fb8a4-d4a3-4782-94f2-3d6eb06221d9","Type":"ContainerStarted","Data":"945e7215b35c6a762902d687d8410eb7be115fac49c7fdd0e3ad36070e81f81b"} Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.493164 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nld4t" podStartSLOduration=137.493142923 podStartE2EDuration="2m17.493142923s" podCreationTimestamp="2026-01-22 00:07:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:43.483824629 +0000 UTC m=+161.520066541" watchObservedRunningTime="2026-01-22 00:09:43.493142923 +0000 UTC m=+161.529384835" Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.493354 4829 patch_prober.go:28] interesting pod/downloads-7954f5f757-8tq29 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.493409 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-8tq29" podUID="9447fd29-4eae-4299-b266-1f5236931aee" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.501123 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.509758 4829 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-w7dnn container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body= Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.509851 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-w7dnn" podUID="2dcde760-e466-49aa-a092-1385876196ef" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Jan 22 
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.520968 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xq6nc\" (UniqueName: \"kubernetes.io/projected/622d8532-4cca-4f15-972e-373735e2a5c1-kube-api-access-xq6nc\") pod \"community-operators-tw5xj\" (UID: \"622d8532-4cca-4f15-972e-373735e2a5c1\") " pod="openshift-marketplace/community-operators-tw5xj"
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.521050 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/622d8532-4cca-4f15-972e-373735e2a5c1-catalog-content\") pod \"community-operators-tw5xj\" (UID: \"622d8532-4cca-4f15-972e-373735e2a5c1\") " pod="openshift-marketplace/community-operators-tw5xj"
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.521082 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9"
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.521203 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/622d8532-4cca-4f15-972e-373735e2a5c1-utilities\") pod \"community-operators-tw5xj\" (UID: \"622d8532-4cca-4f15-972e-373735e2a5c1\") " pod="openshift-marketplace/community-operators-tw5xj"
Jan 22 00:09:43 crc kubenswrapper[4829]: E0122 00:09:43.521926 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:44.021912168 +0000 UTC m=+162.058154070 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.524199 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-2g8pk" podStartSLOduration=137.524177119 podStartE2EDuration="2m17.524177119s" podCreationTimestamp="2026-01-22 00:07:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:43.521513686 +0000 UTC m=+161.557755608" watchObservedRunningTime="2026-01-22 00:09:43.524177119 +0000 UTC m=+161.560419031"
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.609316 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6bgqw" podStartSLOduration=138.609278827 podStartE2EDuration="2m18.609278827s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:43.567767561 +0000 UTC m=+161.604009493" watchObservedRunningTime="2026-01-22 00:09:43.609278827 +0000 UTC m=+161.645520759"
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.625823 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-vcq2q" podStartSLOduration=138.625796537 podStartE2EDuration="2m18.625796537s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:43.610363311 +0000 UTC m=+161.646605223" watchObservedRunningTime="2026-01-22 00:09:43.625796537 +0000 UTC m=+161.662038449"
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.626765 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.627242 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xq6nc\" (UniqueName: \"kubernetes.io/projected/622d8532-4cca-4f15-972e-373735e2a5c1-kube-api-access-xq6nc\") pod \"community-operators-tw5xj\" (UID: \"622d8532-4cca-4f15-972e-373735e2a5c1\") " pod="openshift-marketplace/community-operators-tw5xj"
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.627468 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/622d8532-4cca-4f15-972e-373735e2a5c1-catalog-content\") pod \"community-operators-tw5xj\" (UID: \"622d8532-4cca-4f15-972e-373735e2a5c1\") " pod="openshift-marketplace/community-operators-tw5xj"
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.627684 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/622d8532-4cca-4f15-972e-373735e2a5c1-utilities\") pod \"community-operators-tw5xj\" (UID: \"622d8532-4cca-4f15-972e-373735e2a5c1\") " pod="openshift-marketplace/community-operators-tw5xj"
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.654025 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/622d8532-4cca-4f15-972e-373735e2a5c1-catalog-content\") pod \"community-operators-tw5xj\" (UID: \"622d8532-4cca-4f15-972e-373735e2a5c1\") " pod="openshift-marketplace/community-operators-tw5xj"
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.655020 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hp7g2"]
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.663930 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hp7g2"
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.690249 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/622d8532-4cca-4f15-972e-373735e2a5c1-utilities\") pod \"community-operators-tw5xj\" (UID: \"622d8532-4cca-4f15-972e-373735e2a5c1\") " pod="openshift-marketplace/community-operators-tw5xj"
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.703744 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hp7g2"]
Jan 22 00:09:43 crc kubenswrapper[4829]: E0122 00:09:43.706442 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:44.206412724 +0000 UTC m=+162.242654646 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.723304 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.730053 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gfvpq" podStartSLOduration=137.730019226 podStartE2EDuration="2m17.730019226s" podCreationTimestamp="2026-01-22 00:07:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:43.674012394 +0000 UTC m=+161.710254306" watchObservedRunningTime="2026-01-22 00:09:43.730019226 +0000 UTC m=+161.766261148"
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.791605 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xq6nc\" (UniqueName: \"kubernetes.io/projected/622d8532-4cca-4f15-972e-373735e2a5c1-kube-api-access-xq6nc\") pod \"community-operators-tw5xj\" (UID: \"622d8532-4cca-4f15-972e-373735e2a5c1\") " pod="openshift-marketplace/community-operators-tw5xj"
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.792586 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4802cff4-7d5d-4af4-9ae0-7816748f46b3-utilities\") pod \"certified-operators-hp7g2\" (UID: \"4802cff4-7d5d-4af4-9ae0-7816748f46b3\") " pod="openshift-marketplace/certified-operators-hp7g2"
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.792746 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9"
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.792849 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4802cff4-7d5d-4af4-9ae0-7816748f46b3-catalog-content\") pod \"certified-operators-hp7g2\" (UID: \"4802cff4-7d5d-4af4-9ae0-7816748f46b3\") " pod="openshift-marketplace/certified-operators-hp7g2"
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.792960 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dm4pv\" (UniqueName: \"kubernetes.io/projected/4802cff4-7d5d-4af4-9ae0-7816748f46b3-kube-api-access-dm4pv\") pod \"certified-operators-hp7g2\" (UID: \"4802cff4-7d5d-4af4-9ae0-7816748f46b3\") " pod="openshift-marketplace/certified-operators-hp7g2"
Jan 22 00:09:43 crc kubenswrapper[4829]: E0122 00:09:43.793304 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:44.293292248 +0000 UTC m=+162.329534160 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:44.293292248 +0000 UTC m=+162.329534160 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.800228 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vdnc9" podStartSLOduration=137.800194475 podStartE2EDuration="2m17.800194475s" podCreationTimestamp="2026-01-22 00:07:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:43.722959175 +0000 UTC m=+161.759201107" watchObservedRunningTime="2026-01-22 00:09:43.800194475 +0000 UTC m=+161.836436387" Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.826669 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tw5xj" Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.862495 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-v546c" podStartSLOduration=138.862471065 podStartE2EDuration="2m18.862471065s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:43.831899353 +0000 UTC m=+161.868141265" watchObservedRunningTime="2026-01-22 00:09:43.862471065 +0000 UTC m=+161.898712977" Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.863641 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-z7h8p"] Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.869635 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-z7h8p" Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.893860 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.894050 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4802cff4-7d5d-4af4-9ae0-7816748f46b3-utilities\") pod \"certified-operators-hp7g2\" (UID: \"4802cff4-7d5d-4af4-9ae0-7816748f46b3\") " pod="openshift-marketplace/certified-operators-hp7g2" Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.894105 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4802cff4-7d5d-4af4-9ae0-7816748f46b3-catalog-content\") pod \"certified-operators-hp7g2\" (UID: \"4802cff4-7d5d-4af4-9ae0-7816748f46b3\") " pod="openshift-marketplace/certified-operators-hp7g2" Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.894157 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dm4pv\" (UniqueName: \"kubernetes.io/projected/4802cff4-7d5d-4af4-9ae0-7816748f46b3-kube-api-access-dm4pv\") pod \"certified-operators-hp7g2\" (UID: \"4802cff4-7d5d-4af4-9ae0-7816748f46b3\") " pod="openshift-marketplace/certified-operators-hp7g2" Jan 22 00:09:43 crc kubenswrapper[4829]: E0122 00:09:43.894451 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:44.394437 +0000 UTC m=+162.430678912 (durationBeforeRetry 500ms). 
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.895054 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4802cff4-7d5d-4af4-9ae0-7816748f46b3-utilities\") pod \"certified-operators-hp7g2\" (UID: \"4802cff4-7d5d-4af4-9ae0-7816748f46b3\") " pod="openshift-marketplace/certified-operators-hp7g2"
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.895251 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4802cff4-7d5d-4af4-9ae0-7816748f46b3-catalog-content\") pod \"certified-operators-hp7g2\" (UID: \"4802cff4-7d5d-4af4-9ae0-7816748f46b3\") " pod="openshift-marketplace/certified-operators-hp7g2"
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.900107 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-q9s2c" podStartSLOduration=138.900085018 podStartE2EDuration="2m18.900085018s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:43.88867134 +0000 UTC m=+161.924913252" watchObservedRunningTime="2026-01-22 00:09:43.900085018 +0000 UTC m=+161.936326930"
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.922338 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-z7h8p"]
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.949137 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kzt7t" podStartSLOduration=137.949122802 podStartE2EDuration="2m17.949122802s" podCreationTimestamp="2026-01-22 00:07:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:43.945426735 +0000 UTC m=+161.981668657" watchObservedRunningTime="2026-01-22 00:09:43.949122802 +0000 UTC m=+161.985364714"
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.994917 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12c22bf1-09d4-4a41-b977-87595cc90757-utilities\") pod \"community-operators-z7h8p\" (UID: \"12c22bf1-09d4-4a41-b977-87595cc90757\") " pod="openshift-marketplace/community-operators-z7h8p"
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.994986 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12c22bf1-09d4-4a41-b977-87595cc90757-catalog-content\") pod \"community-operators-z7h8p\" (UID: \"12c22bf1-09d4-4a41-b977-87595cc90757\") " pod="openshift-marketplace/community-operators-z7h8p"
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.995006 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jqvm\" (UniqueName: \"kubernetes.io/projected/12c22bf1-09d4-4a41-b977-87595cc90757-kube-api-access-8jqvm\") pod \"community-operators-z7h8p\" (UID: \"12c22bf1-09d4-4a41-b977-87595cc90757\") " pod="openshift-marketplace/community-operators-z7h8p"
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.995064 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9"
Jan 22 00:09:43 crc kubenswrapper[4829]: E0122 00:09:43.995344 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:44.495332095 +0000 UTC m=+162.531574007 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 00:09:43 crc kubenswrapper[4829]: I0122 00:09:43.996113 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dm4pv\" (UniqueName: \"kubernetes.io/projected/4802cff4-7d5d-4af4-9ae0-7816748f46b3-kube-api-access-dm4pv\") pod \"certified-operators-hp7g2\" (UID: \"4802cff4-7d5d-4af4-9ae0-7816748f46b3\") " pod="openshift-marketplace/certified-operators-hp7g2"
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.014094 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-qxrp6" podStartSLOduration=138.014078796 podStartE2EDuration="2m18.014078796s" podCreationTimestamp="2026-01-22 00:07:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:44.011985729 +0000 UTC m=+162.048227641" watchObservedRunningTime="2026-01-22 00:09:44.014078796 +0000 UTC m=+162.050320708"
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.038449 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-j66zg"]
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.042182 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-j66zg"
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.055311 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-c47sd" podStartSLOduration=139.055295423 podStartE2EDuration="2m19.055295423s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:44.053570839 +0000 UTC m=+162.089812751" watchObservedRunningTime="2026-01-22 00:09:44.055295423 +0000 UTC m=+162.091537335"
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.096683 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 22 00:09:44 crc kubenswrapper[4829]: E0122 00:09:44.096959 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:44.596920012 +0000 UTC m=+162.633161914 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.097045 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9"
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.097127 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12c22bf1-09d4-4a41-b977-87595cc90757-utilities\") pod \"community-operators-z7h8p\" (UID: \"12c22bf1-09d4-4a41-b977-87595cc90757\") " pod="openshift-marketplace/community-operators-z7h8p"
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.097197 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12c22bf1-09d4-4a41-b977-87595cc90757-catalog-content\") pod \"community-operators-z7h8p\" (UID: \"12c22bf1-09d4-4a41-b977-87595cc90757\") " pod="openshift-marketplace/community-operators-z7h8p"
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.097216 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jqvm\" (UniqueName: \"kubernetes.io/projected/12c22bf1-09d4-4a41-b977-87595cc90757-kube-api-access-8jqvm\") pod \"community-operators-z7h8p\" (UID: \"12c22bf1-09d4-4a41-b977-87595cc90757\") " pod="openshift-marketplace/community-operators-z7h8p"
Jan 22 00:09:44 crc kubenswrapper[4829]: E0122 00:09:44.097841 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:44.597833471 +0000 UTC m=+162.634075383 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.098244 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12c22bf1-09d4-4a41-b977-87595cc90757-utilities\") pod \"community-operators-z7h8p\" (UID: \"12c22bf1-09d4-4a41-b977-87595cc90757\") " pod="openshift-marketplace/community-operators-z7h8p"
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.098528 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12c22bf1-09d4-4a41-b977-87595cc90757-catalog-content\") pod \"community-operators-z7h8p\" (UID: \"12c22bf1-09d4-4a41-b977-87595cc90757\") " pod="openshift-marketplace/community-operators-z7h8p"
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.101252 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-j66zg"]
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.105112 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-vh68c" podStartSLOduration=139.105080569 podStartE2EDuration="2m19.105080569s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:44.090912273 +0000 UTC m=+162.127154185" watchObservedRunningTime="2026-01-22 00:09:44.105080569 +0000 UTC m=+162.141322481"
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.166362 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jqvm\" (UniqueName: \"kubernetes.io/projected/12c22bf1-09d4-4a41-b977-87595cc90757-kube-api-access-8jqvm\") pod \"community-operators-z7h8p\" (UID: \"12c22bf1-09d4-4a41-b977-87595cc90757\") " pod="openshift-marketplace/community-operators-z7h8p"
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.197214 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hp7g2"
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.199660 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.199882 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/984817e5-ea2b-4a58-a082-0e83447b116a-catalog-content\") pod \"certified-operators-j66zg\" (UID: \"984817e5-ea2b-4a58-a082-0e83447b116a\") " pod="openshift-marketplace/certified-operators-j66zg"
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.199955 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdh8w\" (UniqueName: \"kubernetes.io/projected/984817e5-ea2b-4a58-a082-0e83447b116a-kube-api-access-hdh8w\") pod \"certified-operators-j66zg\" (UID: \"984817e5-ea2b-4a58-a082-0e83447b116a\") " pod="openshift-marketplace/certified-operators-j66zg"
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.199985 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/984817e5-ea2b-4a58-a082-0e83447b116a-utilities\") pod \"certified-operators-j66zg\" (UID: \"984817e5-ea2b-4a58-a082-0e83447b116a\") " pod="openshift-marketplace/certified-operators-j66zg"
Jan 22 00:09:44 crc kubenswrapper[4829]: E0122 00:09:44.200131 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:44.70011606 +0000 UTC m=+162.736357962 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.205809 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z7h8p"
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.278745 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-nxg4z" podStartSLOduration=138.278730804 podStartE2EDuration="2m18.278730804s" podCreationTimestamp="2026-01-22 00:07:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:44.220811311 +0000 UTC m=+162.257053223" watchObservedRunningTime="2026-01-22 00:09:44.278730804 +0000 UTC m=+162.314972716"
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.301343 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/984817e5-ea2b-4a58-a082-0e83447b116a-utilities\") pod \"certified-operators-j66zg\" (UID: \"984817e5-ea2b-4a58-a082-0e83447b116a\") " pod="openshift-marketplace/certified-operators-j66zg"
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.301418 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9"
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.301479 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/984817e5-ea2b-4a58-a082-0e83447b116a-catalog-content\") pod \"certified-operators-j66zg\" (UID: \"984817e5-ea2b-4a58-a082-0e83447b116a\") " pod="openshift-marketplace/certified-operators-j66zg"
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.301522 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdh8w\" (UniqueName: \"kubernetes.io/projected/984817e5-ea2b-4a58-a082-0e83447b116a-kube-api-access-hdh8w\") pod \"certified-operators-j66zg\" (UID: \"984817e5-ea2b-4a58-a082-0e83447b116a\") " pod="openshift-marketplace/certified-operators-j66zg"
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.301953 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/984817e5-ea2b-4a58-a082-0e83447b116a-catalog-content\") pod \"certified-operators-j66zg\" (UID: \"984817e5-ea2b-4a58-a082-0e83447b116a\") " pod="openshift-marketplace/certified-operators-j66zg"
Jan 22 00:09:44 crc kubenswrapper[4829]: E0122 00:09:44.302021 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:44.802001826 +0000 UTC m=+162.838243818 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.302333 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/984817e5-ea2b-4a58-a082-0e83447b116a-utilities\") pod \"certified-operators-j66zg\" (UID: \"984817e5-ea2b-4a58-a082-0e83447b116a\") " pod="openshift-marketplace/certified-operators-j66zg"
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.337805 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdh8w\" (UniqueName: \"kubernetes.io/projected/984817e5-ea2b-4a58-a082-0e83447b116a-kube-api-access-hdh8w\") pod \"certified-operators-j66zg\" (UID: \"984817e5-ea2b-4a58-a082-0e83447b116a\") " pod="openshift-marketplace/certified-operators-j66zg"
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.341347 4829 patch_prober.go:28] interesting pod/router-default-5444994796-z9x4d container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 22 00:09:44 crc kubenswrapper[4829]: [-]has-synced failed: reason withheld
Jan 22 00:09:44 crc kubenswrapper[4829]: [+]process-running ok
Jan 22 00:09:44 crc kubenswrapper[4829]: healthz check failed
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.341609 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z9x4d" podUID="1cf7eed9-6c3d-4721-bc4d-bc58044750e0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.375124 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-j66zg" Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.384589 4829 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-kzt7t container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.41:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.384666 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kzt7t" podUID="bae421e2-2b7d-465e-baeb-9cdca1f68dc3" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.41:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.405224 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:44 crc kubenswrapper[4829]: E0122 00:09:44.405549 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:44.905521144 +0000 UTC m=+162.941763056 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.508036 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:44 crc kubenswrapper[4829]: E0122 00:09:44.508509 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:45.008494204 +0000 UTC m=+163.044736116 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.609297 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:44 crc kubenswrapper[4829]: E0122 00:09:44.609770 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:45.10972494 +0000 UTC m=+163.145966852 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.649799 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-gwk42" event={"ID":"72778ee0-ec95-4ab0-867c-1997b47449f5","Type":"ContainerStarted","Data":"005291290eea0d85c569cc7f57cf4f30eb2a362672f2b46ee9a224578d68911e"} Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.649871 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-gwk42" event={"ID":"72778ee0-ec95-4ab0-867c-1997b47449f5","Type":"ContainerStarted","Data":"a3242d9d5766befe26127ddf57e43a64da2e48a35ec802e3e06d40d1f014bb62"} Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.685353 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-ht6jq" event={"ID":"4e86e71a-0c33-41b1-854d-0ba4d03af4e6","Type":"ContainerStarted","Data":"76b6f538a49d3fd68fa36ea49dad51bbccfea5d3628dfccafe644dd4734a4933"} Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.686163 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-ht6jq" Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.717119 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.758710 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-6l5hd" 
event={"ID":"7fa648de-a4e5-4e1d-8bbb-d84d5409f0b2","Type":"ContainerStarted","Data":"bab374bcaefe57ddd5dd52eabee1ec31bbb8f51586cf8dc55a233934975f0bd5"} Jan 22 00:09:44 crc kubenswrapper[4829]: E0122 00:09:44.760345 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:45.260324319 +0000 UTC m=+163.296566231 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.777440 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" event={"ID":"2e96a180-475f-4cdc-a89e-e0e61dbcbe53","Type":"ContainerStarted","Data":"ade789636f4c5fbfa24d63b9fbb9c7df5853af9f05a20ff1ee6ed48bc11c7a9c"} Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.816079 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-q2gtb" event={"ID":"a660b566-870f-40b9-a790-6e6f2baa1e90","Type":"ContainerStarted","Data":"aa36489cb09f6fe0bde6654d2d85eb5823ee63402e0c1502314d33562e2f2dd6"} Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.820251 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:44 crc kubenswrapper[4829]: E0122 00:09:44.820689 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:45.320661558 +0000 UTC m=+163.356903480 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.821018 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:44 crc kubenswrapper[4829]: E0122 00:09:44.824661 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:45.324635673 +0000 UTC m=+163.360877585 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.851258 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nld4t" event={"ID":"3127e466-6f03-48c9-8d8b-6de53678192c","Type":"ContainerStarted","Data":"567688dac3e69e610c2e9b027fb9fa8f83ec41111b850afda2d0126e8c0539f3"} Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.860822 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-v546c" event={"ID":"52038e8d-9971-49b4-86fb-8062c0eea326","Type":"ContainerStarted","Data":"6a6c595b42303032c20b74d8ba7b3334bc62f6bb09f3c090d93caa122ac4d88d"} Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.864657 4829 patch_prober.go:28] interesting pod/downloads-7954f5f757-8tq29 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.864705 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-8tq29" podUID="9447fd29-4eae-4299-b266-1f5236931aee" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.874060 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-w7dnn" Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.878838 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gfvpq" Jan 22 00:09:44 crc kubenswrapper[4829]: 
I0122 00:09:44.914400 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-c47sd" Jan 22 00:09:44 crc kubenswrapper[4829]: I0122 00:09:44.922461 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:44 crc kubenswrapper[4829]: E0122 00:09:44.923294 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:45.423260956 +0000 UTC m=+163.459502868 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.059450 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:45 crc kubenswrapper[4829]: E0122 00:09:45.060381 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:45.560364561 +0000 UTC m=+163.596606473 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.162240 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:45 crc kubenswrapper[4829]: E0122 00:09:45.162768 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:45.662748412 +0000 UTC m=+163.698990324 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.236188 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-6l5hd" podStartSLOduration=139.236172153 podStartE2EDuration="2m19.236172153s" podCreationTimestamp="2026-01-22 00:07:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:45.233157248 +0000 UTC m=+163.269399180" watchObservedRunningTime="2026-01-22 00:09:45.236172153 +0000 UTC m=+163.272414065" Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.263805 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:45 crc kubenswrapper[4829]: E0122 00:09:45.264452 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:45.764435753 +0000 UTC m=+163.800677675 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.281473 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tw5xj"] Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.308414 4829 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.390087 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:45 crc kubenswrapper[4829]: E0122 00:09:45.390797 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:45.890777188 +0000 UTC m=+163.927019110 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.397600 4829 patch_prober.go:28] interesting pod/router-default-5444994796-z9x4d container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 00:09:45 crc kubenswrapper[4829]: [-]has-synced failed: reason withheld Jan 22 00:09:45 crc kubenswrapper[4829]: [+]process-running ok Jan 22 00:09:45 crc kubenswrapper[4829]: healthz check failed Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.397652 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z9x4d" podUID="1cf7eed9-6c3d-4721-bc4d-bc58044750e0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.400987 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-gwk42" podStartSLOduration=140.400971089 podStartE2EDuration="2m20.400971089s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:45.399228524 +0000 UTC m=+163.435470456" watchObservedRunningTime="2026-01-22 00:09:45.400971089 +0000 UTC m=+163.437213001" Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.428919 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.429581 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.457447 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.457724 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.502672 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:45 crc kubenswrapper[4829]: E0122 00:09:45.504747 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:46.004724694 +0000 UTC m=+164.040966676 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.510761 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.637289 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:45 crc kubenswrapper[4829]: E0122 00:09:45.637877 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:46.137850414 +0000 UTC m=+164.174092336 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.638011 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6c0b3871-5d0c-43d2-90e2-12438a699020-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"6c0b3871-5d0c-43d2-90e2-12438a699020\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.638273 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.638455 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6c0b3871-5d0c-43d2-90e2-12438a699020-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"6c0b3871-5d0c-43d2-90e2-12438a699020\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 00:09:45 crc kubenswrapper[4829]: E0122 00:09:45.638908 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:46.138894726 +0000 UTC m=+164.175136638 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.643322 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-kzt7t" Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.672984 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-ht6jq" podStartSLOduration=12.672967668 podStartE2EDuration="12.672967668s" podCreationTimestamp="2026-01-22 00:09:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:45.576895955 +0000 UTC m=+163.613137877" watchObservedRunningTime="2026-01-22 00:09:45.672967668 +0000 UTC m=+163.709209580" Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.739304 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.739570 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6c0b3871-5d0c-43d2-90e2-12438a699020-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"6c0b3871-5d0c-43d2-90e2-12438a699020\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.739599 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6c0b3871-5d0c-43d2-90e2-12438a699020-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"6c0b3871-5d0c-43d2-90e2-12438a699020\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 00:09:45 crc kubenswrapper[4829]: E0122 00:09:45.739954 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:46.239939466 +0000 UTC m=+164.276181378 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.739983 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6c0b3871-5d0c-43d2-90e2-12438a699020-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"6c0b3871-5d0c-43d2-90e2-12438a699020\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.747554 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" podStartSLOduration=139.747528825 podStartE2EDuration="2m19.747528825s" podCreationTimestamp="2026-01-22 00:07:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:45.679866116 +0000 UTC m=+163.716108038" watchObservedRunningTime="2026-01-22 00:09:45.747528825 +0000 UTC m=+163.783770737" Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.841656 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6c0b3871-5d0c-43d2-90e2-12438a699020-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"6c0b3871-5d0c-43d2-90e2-12438a699020\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.841791 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:45 crc kubenswrapper[4829]: E0122 00:09:45.842204 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 00:09:46.342190014 +0000 UTC m=+164.378431926 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-6ltv9" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.865701 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-z7h8p"] Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.878532 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-q2gtb" event={"ID":"a660b566-870f-40b9-a790-6e6f2baa1e90","Type":"ContainerStarted","Data":"b5ab8d7d019ae05318c6166c8c971fa787c8ed58c13a3d95026567bf4fb9c0b7"} Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.880238 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tw5xj" event={"ID":"622d8532-4cca-4f15-972e-373735e2a5c1","Type":"ContainerStarted","Data":"596ea893f59b47533e844e2f055e9b849309e579c1fb69da23f34769af27fdde"} Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.894589 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hp7g2"] Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.942695 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:45 crc kubenswrapper[4829]: E0122 00:09:45.943215 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 00:09:46.443196003 +0000 UTC m=+164.479437915 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.982367 4829 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-01-22T00:09:45.308434407Z","Handler":null,"Name":""} Jan 22 00:09:45 crc kubenswrapper[4829]: W0122 00:09:45.989148 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4802cff4_7d5d_4af4_9ae0_7816748f46b3.slice/crio-96cf8cb0530c5093268e4a53d648da6485d40abc0d8f45d7a553dc323f4d59d9 WatchSource:0}: Error finding container 96cf8cb0530c5093268e4a53d648da6485d40abc0d8f45d7a553dc323f4d59d9: Status 404 returned error can't find the container with id 96cf8cb0530c5093268e4a53d648da6485d40abc0d8f45d7a553dc323f4d59d9 Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.992033 4829 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Jan 22 00:09:45 crc kubenswrapper[4829]: I0122 00:09:45.992158 4829 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.000203 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.000566 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.023047 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.023484 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.042579 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-j66zg"] Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.047669 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.079568 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-bnqwm"] Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.084462 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bnqwm" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.106943 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.107937 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.116234 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bnqwm"] Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.145460 4829 patch_prober.go:28] interesting pod/downloads-7954f5f757-8tq29 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.145661 4829 patch_prober.go:28] interesting pod/downloads-7954f5f757-8tq29 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.145719 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-8tq29" podUID="9447fd29-4eae-4299-b266-1f5236931aee" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.147099 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-8tq29" podUID="9447fd29-4eae-4299-b266-1f5236931aee" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.148676 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76530c0d-9597-4099-b9c3-f375bd12b26c-catalog-content\") pod \"redhat-marketplace-bnqwm\" (UID: \"76530c0d-9597-4099-b9c3-f375bd12b26c\") " pod="openshift-marketplace/redhat-marketplace-bnqwm" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.148731 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76530c0d-9597-4099-b9c3-f375bd12b26c-utilities\") pod \"redhat-marketplace-bnqwm\" (UID: \"76530c0d-9597-4099-b9c3-f375bd12b26c\") " pod="openshift-marketplace/redhat-marketplace-bnqwm" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.148806 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhrsr\" (UniqueName: \"kubernetes.io/projected/76530c0d-9597-4099-b9c3-f375bd12b26c-kube-api-access-fhrsr\") pod \"redhat-marketplace-bnqwm\" (UID: \"76530c0d-9597-4099-b9c3-f375bd12b26c\") " pod="openshift-marketplace/redhat-marketplace-bnqwm" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.167580 4829 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.167647 4829 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.250184 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhrsr\" (UniqueName: \"kubernetes.io/projected/76530c0d-9597-4099-b9c3-f375bd12b26c-kube-api-access-fhrsr\") pod \"redhat-marketplace-bnqwm\" (UID: \"76530c0d-9597-4099-b9c3-f375bd12b26c\") " pod="openshift-marketplace/redhat-marketplace-bnqwm" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.250274 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76530c0d-9597-4099-b9c3-f375bd12b26c-catalog-content\") pod \"redhat-marketplace-bnqwm\" (UID: \"76530c0d-9597-4099-b9c3-f375bd12b26c\") " pod="openshift-marketplace/redhat-marketplace-bnqwm" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.250309 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76530c0d-9597-4099-b9c3-f375bd12b26c-utilities\") pod \"redhat-marketplace-bnqwm\" (UID: \"76530c0d-9597-4099-b9c3-f375bd12b26c\") " pod="openshift-marketplace/redhat-marketplace-bnqwm" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.251220 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76530c0d-9597-4099-b9c3-f375bd12b26c-utilities\") pod \"redhat-marketplace-bnqwm\" (UID: \"76530c0d-9597-4099-b9c3-f375bd12b26c\") " pod="openshift-marketplace/redhat-marketplace-bnqwm" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.257788 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76530c0d-9597-4099-b9c3-f375bd12b26c-catalog-content\") pod \"redhat-marketplace-bnqwm\" (UID: \"76530c0d-9597-4099-b9c3-f375bd12b26c\") " pod="openshift-marketplace/redhat-marketplace-bnqwm" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.332511 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-2fdtt" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.332556 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-2fdtt" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.332567 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-z9x4d" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.332982 4829 patch_prober.go:28] interesting pod/console-f9d7485db-2fdtt container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.29:8443/health\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.333031 4829 prober.go:107] "Probe failed" 
probeType="Startup" pod="openshift-console/console-f9d7485db-2fdtt" podUID="a6356f84-ed83-468e-b825-808d0aa2c7d4" containerName="console" probeResult="failure" output="Get \"https://10.217.0.29:8443/health\": dial tcp 10.217.0.29:8443: connect: connection refused" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.419845 4829 patch_prober.go:28] interesting pod/router-default-5444994796-z9x4d container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 00:09:46 crc kubenswrapper[4829]: [-]has-synced failed: reason withheld Jan 22 00:09:46 crc kubenswrapper[4829]: [+]process-running ok Jan 22 00:09:46 crc kubenswrapper[4829]: healthz check failed Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.419889 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z9x4d" podUID="1cf7eed9-6c3d-4721-bc4d-bc58044750e0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.763123 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhrsr\" (UniqueName: \"kubernetes.io/projected/76530c0d-9597-4099-b9c3-f375bd12b26c-kube-api-access-fhrsr\") pod \"redhat-marketplace-bnqwm\" (UID: \"76530c0d-9597-4099-b9c3-f375bd12b26c\") " pod="openshift-marketplace/redhat-marketplace-bnqwm" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.794152 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tnwgc"] Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.795465 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tnwgc" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.893024 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-6ltv9\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.915033 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmvx2\" (UniqueName: \"kubernetes.io/projected/61af08cf-4127-439f-abd2-35fb5ad7dd2f-kube-api-access-vmvx2\") pod \"redhat-marketplace-tnwgc\" (UID: \"61af08cf-4127-439f-abd2-35fb5ad7dd2f\") " pod="openshift-marketplace/redhat-marketplace-tnwgc" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.934577 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61af08cf-4127-439f-abd2-35fb5ad7dd2f-catalog-content\") pod \"redhat-marketplace-tnwgc\" (UID: \"61af08cf-4127-439f-abd2-35fb5ad7dd2f\") " pod="openshift-marketplace/redhat-marketplace-tnwgc" Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.936071 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61af08cf-4127-439f-abd2-35fb5ad7dd2f-utilities\") pod \"redhat-marketplace-tnwgc\" (UID: \"61af08cf-4127-439f-abd2-35fb5ad7dd2f\") " pod="openshift-marketplace/redhat-marketplace-tnwgc" Jan 22 
00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.976036 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tnwgc"] Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.998611 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6pznq"] Jan 22 00:09:46 crc kubenswrapper[4829]: I0122 00:09:46.999774 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6pznq" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.002058 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-njstk"] Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.003198 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-njstk" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.016002 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j66zg" event={"ID":"984817e5-ea2b-4a58-a082-0e83447b116a","Type":"ContainerStarted","Data":"c5674c128a2523c527ce7028cb1c0aec21dc993cc448e2b2ecf4f6ed32a33450"} Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.020966 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z7h8p" event={"ID":"12c22bf1-09d4-4a41-b977-87595cc90757","Type":"ContainerStarted","Data":"ca1f83bb37e0a6d945a0aa709c142425553495976663691ebcccefaa7a760256"} Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.023219 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-q2gtb" event={"ID":"a660b566-870f-40b9-a790-6e6f2baa1e90","Type":"ContainerStarted","Data":"72721c6b54776ec18f1dbe8e616e45d1b92e47634e2470e0c49767386e7a633e"} Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.024813 4829 generic.go:334] "Generic (PLEG): container finished" podID="622d8532-4cca-4f15-972e-373735e2a5c1" containerID="97af89874a5fdaeb3864694d13cde484d690d091e5b369556f1ac47ecf82bb57" exitCode=0 Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.024948 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tw5xj" event={"ID":"622d8532-4cca-4f15-972e-373735e2a5c1","Type":"ContainerDied","Data":"97af89874a5fdaeb3864694d13cde484d690d091e5b369556f1ac47ecf82bb57"} Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.026532 4829 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.036424 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hp7g2" event={"ID":"4802cff4-7d5d-4af4-9ae0-7816748f46b3","Type":"ContainerStarted","Data":"96cf8cb0530c5093268e4a53d648da6485d40abc0d8f45d7a553dc323f4d59d9"} Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.039408 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df877a86-a205-463a-a771-1dee5d9750f8-catalog-content\") pod \"redhat-operators-njstk\" (UID: \"df877a86-a205-463a-a771-1dee5d9750f8\") " pod="openshift-marketplace/redhat-operators-njstk" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.039479 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/df877a86-a205-463a-a771-1dee5d9750f8-utilities\") pod \"redhat-operators-njstk\" (UID: \"df877a86-a205-463a-a771-1dee5d9750f8\") " pod="openshift-marketplace/redhat-operators-njstk" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.039531 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08ce8375-3a53-43e6-a7c1-3ce32698965d-utilities\") pod \"redhat-operators-6pznq\" (UID: \"08ce8375-3a53-43e6-a7c1-3ce32698965d\") " pod="openshift-marketplace/redhat-operators-6pznq" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.042620 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmvx2\" (UniqueName: \"kubernetes.io/projected/61af08cf-4127-439f-abd2-35fb5ad7dd2f-kube-api-access-vmvx2\") pod \"redhat-marketplace-tnwgc\" (UID: \"61af08cf-4127-439f-abd2-35fb5ad7dd2f\") " pod="openshift-marketplace/redhat-marketplace-tnwgc" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.042722 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61af08cf-4127-439f-abd2-35fb5ad7dd2f-catalog-content\") pod \"redhat-marketplace-tnwgc\" (UID: \"61af08cf-4127-439f-abd2-35fb5ad7dd2f\") " pod="openshift-marketplace/redhat-marketplace-tnwgc" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.042756 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08ce8375-3a53-43e6-a7c1-3ce32698965d-catalog-content\") pod \"redhat-operators-6pznq\" (UID: \"08ce8375-3a53-43e6-a7c1-3ce32698965d\") " pod="openshift-marketplace/redhat-operators-6pznq" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.042782 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61af08cf-4127-439f-abd2-35fb5ad7dd2f-utilities\") pod \"redhat-marketplace-tnwgc\" (UID: \"61af08cf-4127-439f-abd2-35fb5ad7dd2f\") " pod="openshift-marketplace/redhat-marketplace-tnwgc" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.042819 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6llc\" (UniqueName: \"kubernetes.io/projected/08ce8375-3a53-43e6-a7c1-3ce32698965d-kube-api-access-v6llc\") pod \"redhat-operators-6pznq\" (UID: \"08ce8375-3a53-43e6-a7c1-3ce32698965d\") " pod="openshift-marketplace/redhat-operators-6pznq" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.042853 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmvjp\" (UniqueName: \"kubernetes.io/projected/df877a86-a205-463a-a771-1dee5d9750f8-kube-api-access-jmvjp\") pod \"redhat-operators-njstk\" (UID: \"df877a86-a205-463a-a771-1dee5d9750f8\") " pod="openshift-marketplace/redhat-operators-njstk" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.043481 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61af08cf-4127-439f-abd2-35fb5ad7dd2f-catalog-content\") pod \"redhat-marketplace-tnwgc\" (UID: \"61af08cf-4127-439f-abd2-35fb5ad7dd2f\") " pod="openshift-marketplace/redhat-marketplace-tnwgc" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.043611 4829 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61af08cf-4127-439f-abd2-35fb5ad7dd2f-utilities\") pod \"redhat-marketplace-tnwgc\" (UID: \"61af08cf-4127-439f-abd2-35fb5ad7dd2f\") " pod="openshift-marketplace/redhat-marketplace-tnwgc" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.051320 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bnqwm" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.144460 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6llc\" (UniqueName: \"kubernetes.io/projected/08ce8375-3a53-43e6-a7c1-3ce32698965d-kube-api-access-v6llc\") pod \"redhat-operators-6pznq\" (UID: \"08ce8375-3a53-43e6-a7c1-3ce32698965d\") " pod="openshift-marketplace/redhat-operators-6pznq" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.144531 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmvjp\" (UniqueName: \"kubernetes.io/projected/df877a86-a205-463a-a771-1dee5d9750f8-kube-api-access-jmvjp\") pod \"redhat-operators-njstk\" (UID: \"df877a86-a205-463a-a771-1dee5d9750f8\") " pod="openshift-marketplace/redhat-operators-njstk" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.144606 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df877a86-a205-463a-a771-1dee5d9750f8-catalog-content\") pod \"redhat-operators-njstk\" (UID: \"df877a86-a205-463a-a771-1dee5d9750f8\") " pod="openshift-marketplace/redhat-operators-njstk" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.144640 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df877a86-a205-463a-a771-1dee5d9750f8-utilities\") pod \"redhat-operators-njstk\" (UID: \"df877a86-a205-463a-a771-1dee5d9750f8\") " pod="openshift-marketplace/redhat-operators-njstk" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.144736 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08ce8375-3a53-43e6-a7c1-3ce32698965d-utilities\") pod \"redhat-operators-6pznq\" (UID: \"08ce8375-3a53-43e6-a7c1-3ce32698965d\") " pod="openshift-marketplace/redhat-operators-6pznq" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.144903 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08ce8375-3a53-43e6-a7c1-3ce32698965d-catalog-content\") pod \"redhat-operators-6pznq\" (UID: \"08ce8375-3a53-43e6-a7c1-3ce32698965d\") " pod="openshift-marketplace/redhat-operators-6pznq" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.147083 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08ce8375-3a53-43e6-a7c1-3ce32698965d-utilities\") pod \"redhat-operators-6pznq\" (UID: \"08ce8375-3a53-43e6-a7c1-3ce32698965d\") " pod="openshift-marketplace/redhat-operators-6pznq" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.148300 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df877a86-a205-463a-a771-1dee5d9750f8-catalog-content\") pod \"redhat-operators-njstk\" (UID: \"df877a86-a205-463a-a771-1dee5d9750f8\") " pod="openshift-marketplace/redhat-operators-njstk" 
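The reconciler entries above identify every volume by a "UniqueName" of the form <plugin>/<podUID>-<volumeName> (for example kubernetes.io/empty-dir/df877a86-a205-463a-a771-1dee5d9750f8-catalog-content for the catalog-content emptyDir of redhat-operators-njstk), and each mount appears twice: an "operationExecutor.MountVolume started" entry followed by a "MountVolume.SetUp succeeded" entry once setup finishes. Below is a minimal, hypothetical Go sketch (not kubelet code) for pulling such a UniqueName apart while reading a log like this one; the parseUniqueName helper is invented for illustration, the /var/lib/kubelet/pods/<podUID>/volumes prefix matches the "Cleaned up orphaned pod volumes dir" path logged further down, and the plugin subdirectory naming (with "/" replaced by "~") is an assumption to verify on the node.

package main

import (
	"fmt"
	"strings"
)

// parseUniqueName splits a reconciler UniqueName such as
//   kubernetes.io/empty-dir/df877a86-a205-463a-a771-1dee5d9750f8-catalog-content
// into plugin, pod UID and volume name. The pod UID is the first five
// dash-separated groups after the plugin prefix; the remainder is the volume name.
// (Hypothetical helper for reading these log entries, not a kubelet API.)
func parseUniqueName(u string) (plugin, podUID, volume string, err error) {
	i := strings.LastIndex(u, "/")
	if i < 0 {
		return "", "", "", fmt.Errorf("no plugin prefix in %q", u)
	}
	plugin, rest := u[:i], u[i+1:]
	parts := strings.SplitN(rest, "-", 6) // 5 UUID groups + volume name remainder
	if len(parts) != 6 {
		return "", "", "", fmt.Errorf("unexpected volume id format: %q", rest)
	}
	podUID = strings.Join(parts[:5], "-")
	volume = parts[5]
	return plugin, podUID, volume, nil
}

func main() {
	// UniqueName copied from the "MountVolume.SetUp succeeded" entry above.
	u := "kubernetes.io/empty-dir/df877a86-a205-463a-a771-1dee5d9750f8-catalog-content"
	plugin, uid, vol, err := parseUniqueName(u)
	if err != nil {
		panic(err)
	}
	fmt.Printf("plugin=%s podUID=%s volume=%s\n", plugin, uid, vol)

	// Assumed on-disk location: per-pod volume data lives under
	// /var/lib/kubelet/pods/<podUID>/volumes (the same prefix as the
	// "Cleaned up orphaned pod volumes dir" entry later in this log);
	// the plugin subdirectory with "/" mapped to "~" is an assumption.
	fmt.Println("/var/lib/kubelet/pods/" + uid + "/volumes/" +
		strings.ReplaceAll(plugin, "/", "~") + "/" + vol)
}

Taking the final field as the unsplit remainder is what keeps volume names that themselves contain dashes (catalog-content, kube-api-access-jmvjp) intact; the split relies only on pod UIDs being standard five-group UUIDs, as they are throughout these entries.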
Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.157926 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08ce8375-3a53-43e6-a7c1-3ce32698965d-catalog-content\") pod \"redhat-operators-6pznq\" (UID: \"08ce8375-3a53-43e6-a7c1-3ce32698965d\") " pod="openshift-marketplace/redhat-operators-6pznq" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.168308 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df877a86-a205-463a-a771-1dee5d9750f8-utilities\") pod \"redhat-operators-njstk\" (UID: \"df877a86-a205-463a-a771-1dee5d9750f8\") " pod="openshift-marketplace/redhat-operators-njstk" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.231910 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-njstk"] Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.255433 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.281089 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6pznq"] Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.281249 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.286963 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.384618 4829 patch_prober.go:28] interesting pod/router-default-5444994796-z9x4d container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 00:09:47 crc kubenswrapper[4829]: [-]has-synced failed: reason withheld Jan 22 00:09:47 crc kubenswrapper[4829]: [+]process-running ok Jan 22 00:09:47 crc kubenswrapper[4829]: healthz check failed Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.385056 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z9x4d" podUID="1cf7eed9-6c3d-4721-bc4d-bc58044750e0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.516892 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.526522 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.528809 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmvjp\" (UniqueName: \"kubernetes.io/projected/df877a86-a205-463a-a771-1dee5d9750f8-kube-api-access-jmvjp\") pod \"redhat-operators-njstk\" (UID: \"df877a86-a205-463a-a771-1dee5d9750f8\") " pod="openshift-marketplace/redhat-operators-njstk" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.537260 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6llc\" (UniqueName: \"kubernetes.io/projected/08ce8375-3a53-43e6-a7c1-3ce32698965d-kube-api-access-v6llc\") pod \"redhat-operators-6pznq\" (UID: \"08ce8375-3a53-43e6-a7c1-3ce32698965d\") " pod="openshift-marketplace/redhat-operators-6pznq" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.557977 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.594613 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmvx2\" (UniqueName: \"kubernetes.io/projected/61af08cf-4127-439f-abd2-35fb5ad7dd2f-kube-api-access-vmvx2\") pod \"redhat-marketplace-tnwgc\" (UID: \"61af08cf-4127-439f-abd2-35fb5ad7dd2f\") " pod="openshift-marketplace/redhat-marketplace-tnwgc" Jan 22 00:09:47 crc kubenswrapper[4829]: W0122 00:09:47.811632 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod6c0b3871_5d0c_43d2_90e2_12438a699020.slice/crio-30fa555ca06ccd0c49ccd21de05a0e394967baa829a52d41ac910fe5446309ab WatchSource:0}: Error finding container 30fa555ca06ccd0c49ccd21de05a0e394967baa829a52d41ac910fe5446309ab: Status 404 returned error can't find the container with id 30fa555ca06ccd0c49ccd21de05a0e394967baa829a52d41ac910fe5446309ab Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.850113 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tnwgc" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.874113 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6pznq" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.902450 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-njstk" Jan 22 00:09:47 crc kubenswrapper[4829]: I0122 00:09:47.983431 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-q2gtb" podStartSLOduration=13.983394875 podStartE2EDuration="13.983394875s" podCreationTimestamp="2026-01-22 00:09:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:47.943417066 +0000 UTC m=+165.979658978" watchObservedRunningTime="2026-01-22 00:09:47.983394875 +0000 UTC m=+166.019636787" Jan 22 00:09:48 crc kubenswrapper[4829]: I0122 00:09:48.168141 4829 generic.go:334] "Generic (PLEG): container finished" podID="4802cff4-7d5d-4af4-9ae0-7816748f46b3" containerID="be6289065fa01ac9048c6f859a36863b9e631091b3ff679cac01d94d4d0c6496" exitCode=0 Jan 22 00:09:48 crc kubenswrapper[4829]: I0122 00:09:48.168489 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hp7g2" event={"ID":"4802cff4-7d5d-4af4-9ae0-7816748f46b3","Type":"ContainerDied","Data":"be6289065fa01ac9048c6f859a36863b9e631091b3ff679cac01d94d4d0c6496"} Jan 22 00:09:48 crc kubenswrapper[4829]: I0122 00:09:48.184265 4829 generic.go:334] "Generic (PLEG): container finished" podID="12c22bf1-09d4-4a41-b977-87595cc90757" containerID="e24fe241c1b9585af07bb8be5ffb27009b6c427fdfdf7e8b683d7c82333c741d" exitCode=0 Jan 22 00:09:48 crc kubenswrapper[4829]: I0122 00:09:48.184323 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z7h8p" event={"ID":"12c22bf1-09d4-4a41-b977-87595cc90757","Type":"ContainerDied","Data":"e24fe241c1b9585af07bb8be5ffb27009b6c427fdfdf7e8b683d7c82333c741d"} Jan 22 00:09:48 crc kubenswrapper[4829]: I0122 00:09:48.195836 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"6c0b3871-5d0c-43d2-90e2-12438a699020","Type":"ContainerStarted","Data":"30fa555ca06ccd0c49ccd21de05a0e394967baa829a52d41ac910fe5446309ab"} Jan 22 00:09:48 crc kubenswrapper[4829]: I0122 00:09:48.217196 4829 generic.go:334] "Generic (PLEG): container finished" podID="984817e5-ea2b-4a58-a082-0e83447b116a" containerID="16dfca33f972a384bd379840456043faff05d7d836d95f695f103d5c73503e28" exitCode=0 Jan 22 00:09:48 crc kubenswrapper[4829]: I0122 00:09:48.217276 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j66zg" event={"ID":"984817e5-ea2b-4a58-a082-0e83447b116a","Type":"ContainerDied","Data":"16dfca33f972a384bd379840456043faff05d7d836d95f695f103d5c73503e28"} Jan 22 00:09:48 crc kubenswrapper[4829]: I0122 00:09:48.223552 4829 generic.go:334] "Generic (PLEG): container finished" podID="c91fb8a4-d4a3-4782-94f2-3d6eb06221d9" containerID="945e7215b35c6a762902d687d8410eb7be115fac49c7fdd0e3ad36070e81f81b" exitCode=0 Jan 22 00:09:48 crc kubenswrapper[4829]: I0122 00:09:48.224282 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484000-n7tmf" event={"ID":"c91fb8a4-d4a3-4782-94f2-3d6eb06221d9","Type":"ContainerDied","Data":"945e7215b35c6a762902d687d8410eb7be115fac49c7fdd0e3ad36070e81f81b"} Jan 22 00:09:48 crc kubenswrapper[4829]: I0122 00:09:48.240485 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8dsgz" Jan 22 00:09:48 crc kubenswrapper[4829]: 
I0122 00:09:48.322742 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bnqwm"] Jan 22 00:09:48 crc kubenswrapper[4829]: I0122 00:09:48.343069 4829 patch_prober.go:28] interesting pod/router-default-5444994796-z9x4d container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 00:09:48 crc kubenswrapper[4829]: [-]has-synced failed: reason withheld Jan 22 00:09:48 crc kubenswrapper[4829]: [+]process-running ok Jan 22 00:09:48 crc kubenswrapper[4829]: healthz check failed Jan 22 00:09:48 crc kubenswrapper[4829]: I0122 00:09:48.343122 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z9x4d" podUID="1cf7eed9-6c3d-4721-bc4d-bc58044750e0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 00:09:48 crc kubenswrapper[4829]: I0122 00:09:48.593215 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Jan 22 00:09:48 crc kubenswrapper[4829]: I0122 00:09:48.636284 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/74beaade-c8f6-4d34-842b-1c03fe72b195-metrics-certs\") pod \"network-metrics-daemon-c82dd\" (UID: \"74beaade-c8f6-4d34-842b-1c03fe72b195\") " pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:09:48 crc kubenswrapper[4829]: I0122 00:09:48.653081 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/74beaade-c8f6-4d34-842b-1c03fe72b195-metrics-certs\") pod \"network-metrics-daemon-c82dd\" (UID: \"74beaade-c8f6-4d34-842b-1c03fe72b195\") " pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:09:48 crc kubenswrapper[4829]: I0122 00:09:48.691605 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-c82dd" Jan 22 00:09:48 crc kubenswrapper[4829]: I0122 00:09:48.709463 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-6ltv9"] Jan 22 00:09:48 crc kubenswrapper[4829]: I0122 00:09:48.773249 4829 patch_prober.go:28] interesting pod/apiserver-76f77b778f-gwk42 container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Jan 22 00:09:48 crc kubenswrapper[4829]: [+]log ok Jan 22 00:09:48 crc kubenswrapper[4829]: [+]etcd ok Jan 22 00:09:48 crc kubenswrapper[4829]: [+]poststarthook/start-apiserver-admission-initializer ok Jan 22 00:09:48 crc kubenswrapper[4829]: [+]poststarthook/generic-apiserver-start-informers ok Jan 22 00:09:48 crc kubenswrapper[4829]: [+]poststarthook/max-in-flight-filter ok Jan 22 00:09:48 crc kubenswrapper[4829]: [+]poststarthook/storage-object-count-tracker-hook ok Jan 22 00:09:48 crc kubenswrapper[4829]: [+]poststarthook/image.openshift.io-apiserver-caches ok Jan 22 00:09:48 crc kubenswrapper[4829]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Jan 22 00:09:48 crc kubenswrapper[4829]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Jan 22 00:09:48 crc kubenswrapper[4829]: [+]poststarthook/project.openshift.io-projectcache ok Jan 22 00:09:48 crc kubenswrapper[4829]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Jan 22 00:09:48 crc kubenswrapper[4829]: [+]poststarthook/openshift.io-startinformers ok Jan 22 00:09:48 crc kubenswrapper[4829]: [+]poststarthook/openshift.io-restmapperupdater ok Jan 22 00:09:48 crc kubenswrapper[4829]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Jan 22 00:09:48 crc kubenswrapper[4829]: livez check failed Jan 22 00:09:48 crc kubenswrapper[4829]: I0122 00:09:48.775229 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-gwk42" podUID="72778ee0-ec95-4ab0-867c-1997b47449f5" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 00:09:48 crc kubenswrapper[4829]: I0122 00:09:48.840903 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6pznq"] Jan 22 00:09:49 crc kubenswrapper[4829]: I0122 00:09:49.039565 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-c82dd"] Jan 22 00:09:49 crc kubenswrapper[4829]: W0122 00:09:49.047026 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod74beaade_c8f6_4d34_842b_1c03fe72b195.slice/crio-7080a59cfe6a200204dd2c6ead738364f4ab22b2e3ebf5c270dd3426b95678a1 WatchSource:0}: Error finding container 7080a59cfe6a200204dd2c6ead738364f4ab22b2e3ebf5c270dd3426b95678a1: Status 404 returned error can't find the container with id 7080a59cfe6a200204dd2c6ead738364f4ab22b2e3ebf5c270dd3426b95678a1 Jan 22 00:09:49 crc kubenswrapper[4829]: I0122 00:09:49.075003 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tnwgc"] Jan 22 00:09:49 crc kubenswrapper[4829]: I0122 00:09:49.120002 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-njstk"] Jan 22 00:09:49 crc kubenswrapper[4829]: W0122 00:09:49.132897 4829 
manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddf877a86_a205_463a_a771_1dee5d9750f8.slice/crio-3e33332956cc6fb8c82732ffc0a32ab0568418a073381bae06b72c3636cc2b92 WatchSource:0}: Error finding container 3e33332956cc6fb8c82732ffc0a32ab0568418a073381bae06b72c3636cc2b92: Status 404 returned error can't find the container with id 3e33332956cc6fb8c82732ffc0a32ab0568418a073381bae06b72c3636cc2b92 Jan 22 00:09:49 crc kubenswrapper[4829]: I0122 00:09:49.334608 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"6c0b3871-5d0c-43d2-90e2-12438a699020","Type":"ContainerStarted","Data":"9a7ebcfea738128f1e98636f69fbbd5830cd79083afc3aa614ae4164cc0ed605"} Jan 22 00:09:49 crc kubenswrapper[4829]: I0122 00:09:49.336451 4829 patch_prober.go:28] interesting pod/router-default-5444994796-z9x4d container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 00:09:49 crc kubenswrapper[4829]: [-]has-synced failed: reason withheld Jan 22 00:09:49 crc kubenswrapper[4829]: [+]process-running ok Jan 22 00:09:49 crc kubenswrapper[4829]: healthz check failed Jan 22 00:09:49 crc kubenswrapper[4829]: I0122 00:09:49.336488 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z9x4d" podUID="1cf7eed9-6c3d-4721-bc4d-bc58044750e0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 00:09:49 crc kubenswrapper[4829]: I0122 00:09:49.341938 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-c82dd" event={"ID":"74beaade-c8f6-4d34-842b-1c03fe72b195","Type":"ContainerStarted","Data":"7080a59cfe6a200204dd2c6ead738364f4ab22b2e3ebf5c270dd3426b95678a1"} Jan 22 00:09:49 crc kubenswrapper[4829]: I0122 00:09:49.360610 4829 generic.go:334] "Generic (PLEG): container finished" podID="76530c0d-9597-4099-b9c3-f375bd12b26c" containerID="1c986d433a96ac6f44c57098d21ecb2d1ead18c25957402b311df1be01c91656" exitCode=0 Jan 22 00:09:49 crc kubenswrapper[4829]: I0122 00:09:49.360743 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bnqwm" event={"ID":"76530c0d-9597-4099-b9c3-f375bd12b26c","Type":"ContainerDied","Data":"1c986d433a96ac6f44c57098d21ecb2d1ead18c25957402b311df1be01c91656"} Jan 22 00:09:49 crc kubenswrapper[4829]: I0122 00:09:49.360787 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bnqwm" event={"ID":"76530c0d-9597-4099-b9c3-f375bd12b26c","Type":"ContainerStarted","Data":"dcdde47aa6ff1f296b8d94867a012040a5905344d3c78a1c00d99d3208fdad81"} Jan 22 00:09:49 crc kubenswrapper[4829]: I0122 00:09:49.364985 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=4.36496504 podStartE2EDuration="4.36496504s" podCreationTimestamp="2026-01-22 00:09:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:49.356823424 +0000 UTC m=+167.393065356" watchObservedRunningTime="2026-01-22 00:09:49.36496504 +0000 UTC m=+167.401206952" Jan 22 00:09:49 crc kubenswrapper[4829]: I0122 00:09:49.374322 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-njstk" event={"ID":"df877a86-a205-463a-a771-1dee5d9750f8","Type":"ContainerStarted","Data":"3e33332956cc6fb8c82732ffc0a32ab0568418a073381bae06b72c3636cc2b92"} Jan 22 00:09:49 crc kubenswrapper[4829]: I0122 00:09:49.375923 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tnwgc" event={"ID":"61af08cf-4127-439f-abd2-35fb5ad7dd2f","Type":"ContainerStarted","Data":"5eaa81f52d3a19262a6195ef934f9d089f66898812524fa8bf7b7aafdecdc606"} Jan 22 00:09:49 crc kubenswrapper[4829]: I0122 00:09:49.377803 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" event={"ID":"7755e10e-4f27-44b7-b510-a8e4b5e53e1d","Type":"ContainerStarted","Data":"6cb2232521eb98f2a0f75624120f5c55000d32065763ba838018a25bac91d926"} Jan 22 00:09:49 crc kubenswrapper[4829]: I0122 00:09:49.377848 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" event={"ID":"7755e10e-4f27-44b7-b510-a8e4b5e53e1d","Type":"ContainerStarted","Data":"edc4ef08a9220353663d5d5a372f8136d2abbffa15e66d8521dc4ea2712e9268"} Jan 22 00:09:49 crc kubenswrapper[4829]: I0122 00:09:49.377894 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:09:49 crc kubenswrapper[4829]: I0122 00:09:49.379741 4829 generic.go:334] "Generic (PLEG): container finished" podID="08ce8375-3a53-43e6-a7c1-3ce32698965d" containerID="c3be5b291940743def002ca77afc0710f2e9686dc720ae4583e50bda28bea922" exitCode=0 Jan 22 00:09:49 crc kubenswrapper[4829]: I0122 00:09:49.379787 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6pznq" event={"ID":"08ce8375-3a53-43e6-a7c1-3ce32698965d","Type":"ContainerDied","Data":"c3be5b291940743def002ca77afc0710f2e9686dc720ae4583e50bda28bea922"} Jan 22 00:09:49 crc kubenswrapper[4829]: I0122 00:09:49.379827 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6pznq" event={"ID":"08ce8375-3a53-43e6-a7c1-3ce32698965d","Type":"ContainerStarted","Data":"016f3fa5e913461945b1f5cf11f1639ada0f54134a0b9795df89c831fd8e6e29"} Jan 22 00:09:49 crc kubenswrapper[4829]: I0122 00:09:49.422475 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" podStartSLOduration=144.42245527 podStartE2EDuration="2m24.42245527s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:49.40244926 +0000 UTC m=+167.438691172" watchObservedRunningTime="2026-01-22 00:09:49.42245527 +0000 UTC m=+167.458697192" Jan 22 00:09:50 crc kubenswrapper[4829]: I0122 00:09:50.158202 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484000-n7tmf" Jan 22 00:09:50 crc kubenswrapper[4829]: I0122 00:09:50.248477 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c91fb8a4-d4a3-4782-94f2-3d6eb06221d9-config-volume\") pod \"c91fb8a4-d4a3-4782-94f2-3d6eb06221d9\" (UID: \"c91fb8a4-d4a3-4782-94f2-3d6eb06221d9\") " Jan 22 00:09:50 crc kubenswrapper[4829]: I0122 00:09:50.248575 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c91fb8a4-d4a3-4782-94f2-3d6eb06221d9-secret-volume\") pod \"c91fb8a4-d4a3-4782-94f2-3d6eb06221d9\" (UID: \"c91fb8a4-d4a3-4782-94f2-3d6eb06221d9\") " Jan 22 00:09:50 crc kubenswrapper[4829]: I0122 00:09:50.248624 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gqhpc\" (UniqueName: \"kubernetes.io/projected/c91fb8a4-d4a3-4782-94f2-3d6eb06221d9-kube-api-access-gqhpc\") pod \"c91fb8a4-d4a3-4782-94f2-3d6eb06221d9\" (UID: \"c91fb8a4-d4a3-4782-94f2-3d6eb06221d9\") " Jan 22 00:09:50 crc kubenswrapper[4829]: I0122 00:09:50.249879 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c91fb8a4-d4a3-4782-94f2-3d6eb06221d9-config-volume" (OuterVolumeSpecName: "config-volume") pod "c91fb8a4-d4a3-4782-94f2-3d6eb06221d9" (UID: "c91fb8a4-d4a3-4782-94f2-3d6eb06221d9"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:09:50 crc kubenswrapper[4829]: I0122 00:09:50.265758 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c91fb8a4-d4a3-4782-94f2-3d6eb06221d9-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c91fb8a4-d4a3-4782-94f2-3d6eb06221d9" (UID: "c91fb8a4-d4a3-4782-94f2-3d6eb06221d9"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:09:50 crc kubenswrapper[4829]: I0122 00:09:50.268997 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c91fb8a4-d4a3-4782-94f2-3d6eb06221d9-kube-api-access-gqhpc" (OuterVolumeSpecName: "kube-api-access-gqhpc") pod "c91fb8a4-d4a3-4782-94f2-3d6eb06221d9" (UID: "c91fb8a4-d4a3-4782-94f2-3d6eb06221d9"). InnerVolumeSpecName "kube-api-access-gqhpc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:09:50 crc kubenswrapper[4829]: I0122 00:09:50.334655 4829 patch_prober.go:28] interesting pod/router-default-5444994796-z9x4d container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 00:09:50 crc kubenswrapper[4829]: [-]has-synced failed: reason withheld Jan 22 00:09:50 crc kubenswrapper[4829]: [+]process-running ok Jan 22 00:09:50 crc kubenswrapper[4829]: healthz check failed Jan 22 00:09:50 crc kubenswrapper[4829]: I0122 00:09:50.334858 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z9x4d" podUID="1cf7eed9-6c3d-4721-bc4d-bc58044750e0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 00:09:50 crc kubenswrapper[4829]: I0122 00:09:50.350063 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gqhpc\" (UniqueName: \"kubernetes.io/projected/c91fb8a4-d4a3-4782-94f2-3d6eb06221d9-kube-api-access-gqhpc\") on node \"crc\" DevicePath \"\"" Jan 22 00:09:50 crc kubenswrapper[4829]: I0122 00:09:50.350082 4829 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c91fb8a4-d4a3-4782-94f2-3d6eb06221d9-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 00:09:50 crc kubenswrapper[4829]: I0122 00:09:50.350091 4829 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c91fb8a4-d4a3-4782-94f2-3d6eb06221d9-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 00:09:50 crc kubenswrapper[4829]: I0122 00:09:50.410561 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-c82dd" event={"ID":"74beaade-c8f6-4d34-842b-1c03fe72b195","Type":"ContainerStarted","Data":"7423739f88e4d7e42ce80a94c835dd9ed817ac6dce1d67377e6908c89280b7ae"} Jan 22 00:09:50 crc kubenswrapper[4829]: I0122 00:09:50.414270 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484000-n7tmf" event={"ID":"c91fb8a4-d4a3-4782-94f2-3d6eb06221d9","Type":"ContainerDied","Data":"4910b9ed18b39cba484c33a568e1f074f944fed94d29b47f77ec62daf3a09939"} Jan 22 00:09:50 crc kubenswrapper[4829]: I0122 00:09:50.414327 4829 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4910b9ed18b39cba484c33a568e1f074f944fed94d29b47f77ec62daf3a09939" Jan 22 00:09:50 crc kubenswrapper[4829]: I0122 00:09:50.414331 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484000-n7tmf" Jan 22 00:09:50 crc kubenswrapper[4829]: I0122 00:09:50.419444 4829 generic.go:334] "Generic (PLEG): container finished" podID="df877a86-a205-463a-a771-1dee5d9750f8" containerID="efbdb14d14212263f32a51c762cbae85183b8694ccdde2c8a2117250dd0ac445" exitCode=0 Jan 22 00:09:50 crc kubenswrapper[4829]: I0122 00:09:50.419554 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-njstk" event={"ID":"df877a86-a205-463a-a771-1dee5d9750f8","Type":"ContainerDied","Data":"efbdb14d14212263f32a51c762cbae85183b8694ccdde2c8a2117250dd0ac445"} Jan 22 00:09:50 crc kubenswrapper[4829]: I0122 00:09:50.434445 4829 generic.go:334] "Generic (PLEG): container finished" podID="61af08cf-4127-439f-abd2-35fb5ad7dd2f" containerID="719a20bba54cfa2e89db7ded5aeb1543b439b1830c2a8e1b7cb330c2b03d1030" exitCode=0 Jan 22 00:09:50 crc kubenswrapper[4829]: I0122 00:09:50.434575 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tnwgc" event={"ID":"61af08cf-4127-439f-abd2-35fb5ad7dd2f","Type":"ContainerDied","Data":"719a20bba54cfa2e89db7ded5aeb1543b439b1830c2a8e1b7cb330c2b03d1030"} Jan 22 00:09:50 crc kubenswrapper[4829]: I0122 00:09:50.451300 4829 generic.go:334] "Generic (PLEG): container finished" podID="6c0b3871-5d0c-43d2-90e2-12438a699020" containerID="9a7ebcfea738128f1e98636f69fbbd5830cd79083afc3aa614ae4164cc0ed605" exitCode=0 Jan 22 00:09:50 crc kubenswrapper[4829]: I0122 00:09:50.451362 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"6c0b3871-5d0c-43d2-90e2-12438a699020","Type":"ContainerDied","Data":"9a7ebcfea738128f1e98636f69fbbd5830cd79083afc3aa614ae4164cc0ed605"} Jan 22 00:09:51 crc kubenswrapper[4829]: I0122 00:09:51.005258 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:51 crc kubenswrapper[4829]: I0122 00:09:51.010592 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-gwk42" Jan 22 00:09:51 crc kubenswrapper[4829]: I0122 00:09:51.346439 4829 patch_prober.go:28] interesting pod/router-default-5444994796-z9x4d container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 00:09:51 crc kubenswrapper[4829]: [-]has-synced failed: reason withheld Jan 22 00:09:51 crc kubenswrapper[4829]: [+]process-running ok Jan 22 00:09:51 crc kubenswrapper[4829]: healthz check failed Jan 22 00:09:51 crc kubenswrapper[4829]: I0122 00:09:51.346535 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z9x4d" podUID="1cf7eed9-6c3d-4721-bc4d-bc58044750e0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 00:09:51 crc kubenswrapper[4829]: I0122 00:09:51.484616 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-c82dd" event={"ID":"74beaade-c8f6-4d34-842b-1c03fe72b195","Type":"ContainerStarted","Data":"0340233169a62a2deef8f76ee5302c88c64c522cb12b9596cd8a302c24eb0ef4"} Jan 22 00:09:51 crc kubenswrapper[4829]: I0122 00:09:51.573502 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-c82dd" 
podStartSLOduration=146.57347896 podStartE2EDuration="2m26.57347896s" podCreationTimestamp="2026-01-22 00:07:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:09:51.557231638 +0000 UTC m=+169.593473560" watchObservedRunningTime="2026-01-22 00:09:51.57347896 +0000 UTC m=+169.609720872" Jan 22 00:09:51 crc kubenswrapper[4829]: I0122 00:09:51.985113 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.118004 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6c0b3871-5d0c-43d2-90e2-12438a699020-kubelet-dir\") pod \"6c0b3871-5d0c-43d2-90e2-12438a699020\" (UID: \"6c0b3871-5d0c-43d2-90e2-12438a699020\") " Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.118120 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6c0b3871-5d0c-43d2-90e2-12438a699020-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "6c0b3871-5d0c-43d2-90e2-12438a699020" (UID: "6c0b3871-5d0c-43d2-90e2-12438a699020"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.118177 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6c0b3871-5d0c-43d2-90e2-12438a699020-kube-api-access\") pod \"6c0b3871-5d0c-43d2-90e2-12438a699020\" (UID: \"6c0b3871-5d0c-43d2-90e2-12438a699020\") " Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.118558 4829 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6c0b3871-5d0c-43d2-90e2-12438a699020-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.129847 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c0b3871-5d0c-43d2-90e2-12438a699020-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "6c0b3871-5d0c-43d2-90e2-12438a699020" (UID: "6c0b3871-5d0c-43d2-90e2-12438a699020"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.183280 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-ht6jq" Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.219445 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6c0b3871-5d0c-43d2-90e2-12438a699020-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.334524 4829 patch_prober.go:28] interesting pod/router-default-5444994796-z9x4d container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 00:09:52 crc kubenswrapper[4829]: [-]has-synced failed: reason withheld Jan 22 00:09:52 crc kubenswrapper[4829]: [+]process-running ok Jan 22 00:09:52 crc kubenswrapper[4829]: healthz check failed Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.334641 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z9x4d" podUID="1cf7eed9-6c3d-4721-bc4d-bc58044750e0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.452268 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 22 00:09:52 crc kubenswrapper[4829]: E0122 00:09:52.452602 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c0b3871-5d0c-43d2-90e2-12438a699020" containerName="pruner" Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.452617 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c0b3871-5d0c-43d2-90e2-12438a699020" containerName="pruner" Jan 22 00:09:52 crc kubenswrapper[4829]: E0122 00:09:52.452636 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c91fb8a4-d4a3-4782-94f2-3d6eb06221d9" containerName="collect-profiles" Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.452649 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="c91fb8a4-d4a3-4782-94f2-3d6eb06221d9" containerName="collect-profiles" Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.452793 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c0b3871-5d0c-43d2-90e2-12438a699020" containerName="pruner" Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.452811 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="c91fb8a4-d4a3-4782-94f2-3d6eb06221d9" containerName="collect-profiles" Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.453252 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.455282 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.455801 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.458585 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.549873 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4d010742-e9a1-4949-8cd3-e29f666694f5-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"4d010742-e9a1-4949-8cd3-e29f666694f5\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.549947 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4d010742-e9a1-4949-8cd3-e29f666694f5-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"4d010742-e9a1-4949-8cd3-e29f666694f5\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.560872 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.567026 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"6c0b3871-5d0c-43d2-90e2-12438a699020","Type":"ContainerDied","Data":"30fa555ca06ccd0c49ccd21de05a0e394967baa829a52d41ac910fe5446309ab"} Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.567073 4829 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="30fa555ca06ccd0c49ccd21de05a0e394967baa829a52d41ac910fe5446309ab" Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.652345 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4d010742-e9a1-4949-8cd3-e29f666694f5-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"4d010742-e9a1-4949-8cd3-e29f666694f5\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.652415 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4d010742-e9a1-4949-8cd3-e29f666694f5-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"4d010742-e9a1-4949-8cd3-e29f666694f5\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.652481 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4d010742-e9a1-4949-8cd3-e29f666694f5-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"4d010742-e9a1-4949-8cd3-e29f666694f5\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.669220 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/4d010742-e9a1-4949-8cd3-e29f666694f5-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"4d010742-e9a1-4949-8cd3-e29f666694f5\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 00:09:52 crc kubenswrapper[4829]: I0122 00:09:52.813945 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 00:09:53 crc kubenswrapper[4829]: I0122 00:09:53.206610 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 22 00:09:53 crc kubenswrapper[4829]: W0122 00:09:53.250205 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod4d010742_e9a1_4949_8cd3_e29f666694f5.slice/crio-f422fb4121ef381b93c0df154fda175c97557f663567bca85df1494b9ee13523 WatchSource:0}: Error finding container f422fb4121ef381b93c0df154fda175c97557f663567bca85df1494b9ee13523: Status 404 returned error can't find the container with id f422fb4121ef381b93c0df154fda175c97557f663567bca85df1494b9ee13523 Jan 22 00:09:53 crc kubenswrapper[4829]: I0122 00:09:53.339332 4829 patch_prober.go:28] interesting pod/router-default-5444994796-z9x4d container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 00:09:53 crc kubenswrapper[4829]: [-]has-synced failed: reason withheld Jan 22 00:09:53 crc kubenswrapper[4829]: [+]process-running ok Jan 22 00:09:53 crc kubenswrapper[4829]: healthz check failed Jan 22 00:09:53 crc kubenswrapper[4829]: I0122 00:09:53.339491 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z9x4d" podUID="1cf7eed9-6c3d-4721-bc4d-bc58044750e0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 00:09:53 crc kubenswrapper[4829]: I0122 00:09:53.593631 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"4d010742-e9a1-4949-8cd3-e29f666694f5","Type":"ContainerStarted","Data":"f422fb4121ef381b93c0df154fda175c97557f663567bca85df1494b9ee13523"} Jan 22 00:09:54 crc kubenswrapper[4829]: I0122 00:09:54.335324 4829 patch_prober.go:28] interesting pod/router-default-5444994796-z9x4d container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 00:09:54 crc kubenswrapper[4829]: [-]has-synced failed: reason withheld Jan 22 00:09:54 crc kubenswrapper[4829]: [+]process-running ok Jan 22 00:09:54 crc kubenswrapper[4829]: healthz check failed Jan 22 00:09:54 crc kubenswrapper[4829]: I0122 00:09:54.335797 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z9x4d" podUID="1cf7eed9-6c3d-4721-bc4d-bc58044750e0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 00:09:54 crc kubenswrapper[4829]: I0122 00:09:54.664622 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"4d010742-e9a1-4949-8cd3-e29f666694f5","Type":"ContainerStarted","Data":"42a36fec99121e68fabcfb3cbe7d7418c7a18e8031dc6dfb5b9418a8732ef4bc"} Jan 22 00:09:55 crc kubenswrapper[4829]: I0122 00:09:55.334486 4829 patch_prober.go:28] interesting pod/router-default-5444994796-z9x4d container/router 
namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 00:09:55 crc kubenswrapper[4829]: [-]has-synced failed: reason withheld Jan 22 00:09:55 crc kubenswrapper[4829]: [+]process-running ok Jan 22 00:09:55 crc kubenswrapper[4829]: healthz check failed Jan 22 00:09:55 crc kubenswrapper[4829]: I0122 00:09:55.334553 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z9x4d" podUID="1cf7eed9-6c3d-4721-bc4d-bc58044750e0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 00:09:55 crc kubenswrapper[4829]: I0122 00:09:55.680486 4829 generic.go:334] "Generic (PLEG): container finished" podID="4d010742-e9a1-4949-8cd3-e29f666694f5" containerID="42a36fec99121e68fabcfb3cbe7d7418c7a18e8031dc6dfb5b9418a8732ef4bc" exitCode=0 Jan 22 00:09:55 crc kubenswrapper[4829]: I0122 00:09:55.680572 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"4d010742-e9a1-4949-8cd3-e29f666694f5","Type":"ContainerDied","Data":"42a36fec99121e68fabcfb3cbe7d7418c7a18e8031dc6dfb5b9418a8732ef4bc"} Jan 22 00:09:56 crc kubenswrapper[4829]: I0122 00:09:56.135579 4829 patch_prober.go:28] interesting pod/downloads-7954f5f757-8tq29 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Jan 22 00:09:56 crc kubenswrapper[4829]: I0122 00:09:56.135639 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-8tq29" podUID="9447fd29-4eae-4299-b266-1f5236931aee" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Jan 22 00:09:56 crc kubenswrapper[4829]: I0122 00:09:56.136330 4829 patch_prober.go:28] interesting pod/downloads-7954f5f757-8tq29 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Jan 22 00:09:56 crc kubenswrapper[4829]: I0122 00:09:56.136380 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-8tq29" podUID="9447fd29-4eae-4299-b266-1f5236931aee" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Jan 22 00:09:56 crc kubenswrapper[4829]: I0122 00:09:56.330409 4829 patch_prober.go:28] interesting pod/console-f9d7485db-2fdtt container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.29:8443/health\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Jan 22 00:09:56 crc kubenswrapper[4829]: I0122 00:09:56.330698 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-2fdtt" podUID="a6356f84-ed83-468e-b825-808d0aa2c7d4" containerName="console" probeResult="failure" output="Get \"https://10.217.0.29:8443/health\": dial tcp 10.217.0.29:8443: connect: connection refused" Jan 22 00:09:56 crc kubenswrapper[4829]: I0122 00:09:56.334946 4829 patch_prober.go:28] interesting pod/router-default-5444994796-z9x4d container/router namespace/openshift-ingress: Startup probe status=failure 
output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 00:09:56 crc kubenswrapper[4829]: [-]has-synced failed: reason withheld Jan 22 00:09:56 crc kubenswrapper[4829]: [+]process-running ok Jan 22 00:09:56 crc kubenswrapper[4829]: healthz check failed Jan 22 00:09:56 crc kubenswrapper[4829]: I0122 00:09:56.335020 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z9x4d" podUID="1cf7eed9-6c3d-4721-bc4d-bc58044750e0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 00:09:57 crc kubenswrapper[4829]: I0122 00:09:57.336190 4829 patch_prober.go:28] interesting pod/router-default-5444994796-z9x4d container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 00:09:57 crc kubenswrapper[4829]: [-]has-synced failed: reason withheld Jan 22 00:09:57 crc kubenswrapper[4829]: [+]process-running ok Jan 22 00:09:57 crc kubenswrapper[4829]: healthz check failed Jan 22 00:09:57 crc kubenswrapper[4829]: I0122 00:09:57.336632 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z9x4d" podUID="1cf7eed9-6c3d-4721-bc4d-bc58044750e0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 00:09:58 crc kubenswrapper[4829]: I0122 00:09:58.333309 4829 patch_prober.go:28] interesting pod/router-default-5444994796-z9x4d container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 00:09:58 crc kubenswrapper[4829]: [-]has-synced failed: reason withheld Jan 22 00:09:58 crc kubenswrapper[4829]: [+]process-running ok Jan 22 00:09:58 crc kubenswrapper[4829]: healthz check failed Jan 22 00:09:58 crc kubenswrapper[4829]: I0122 00:09:58.333368 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z9x4d" podUID="1cf7eed9-6c3d-4721-bc4d-bc58044750e0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 00:09:59 crc kubenswrapper[4829]: I0122 00:09:59.335348 4829 patch_prober.go:28] interesting pod/router-default-5444994796-z9x4d container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 00:09:59 crc kubenswrapper[4829]: [+]has-synced ok Jan 22 00:09:59 crc kubenswrapper[4829]: [+]process-running ok Jan 22 00:09:59 crc kubenswrapper[4829]: healthz check failed Jan 22 00:09:59 crc kubenswrapper[4829]: I0122 00:09:59.336054 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-z9x4d" podUID="1cf7eed9-6c3d-4721-bc4d-bc58044750e0" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 00:10:00 crc kubenswrapper[4829]: I0122 00:10:00.335599 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-z9x4d" Jan 22 00:10:00 crc kubenswrapper[4829]: I0122 00:10:00.338994 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-z9x4d" Jan 22 00:10:01 crc kubenswrapper[4829]: I0122 00:10:01.592209 4829 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-q9d5h"] Jan 22 00:10:01 crc kubenswrapper[4829]: I0122 00:10:01.592986 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" podUID="0e60009d-5985-47f9-b164-32cf604c23fa" containerName="controller-manager" containerID="cri-o://c1412b09bfd9577fc6d98be98ea9829c4e95361029ee125457dae15f47eb01e7" gracePeriod=30 Jan 22 00:10:01 crc kubenswrapper[4829]: I0122 00:10:01.602207 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp"] Jan 22 00:10:01 crc kubenswrapper[4829]: I0122 00:10:01.602398 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp" podUID="fc463c00-fc43-4cdc-8ff0-a5026aba9539" containerName="route-controller-manager" containerID="cri-o://c245a3b6afbfec49810a2682a51541ec1be31695ca4b2a88ea188c398236ae80" gracePeriod=30 Jan 22 00:10:03 crc kubenswrapper[4829]: I0122 00:10:03.793559 4829 generic.go:334] "Generic (PLEG): container finished" podID="fc463c00-fc43-4cdc-8ff0-a5026aba9539" containerID="c245a3b6afbfec49810a2682a51541ec1be31695ca4b2a88ea188c398236ae80" exitCode=0 Jan 22 00:10:03 crc kubenswrapper[4829]: I0122 00:10:03.793619 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp" event={"ID":"fc463c00-fc43-4cdc-8ff0-a5026aba9539","Type":"ContainerDied","Data":"c245a3b6afbfec49810a2682a51541ec1be31695ca4b2a88ea188c398236ae80"} Jan 22 00:10:03 crc kubenswrapper[4829]: I0122 00:10:03.796150 4829 generic.go:334] "Generic (PLEG): container finished" podID="0e60009d-5985-47f9-b164-32cf604c23fa" containerID="c1412b09bfd9577fc6d98be98ea9829c4e95361029ee125457dae15f47eb01e7" exitCode=0 Jan 22 00:10:03 crc kubenswrapper[4829]: I0122 00:10:03.796207 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" event={"ID":"0e60009d-5985-47f9-b164-32cf604c23fa","Type":"ContainerDied","Data":"c1412b09bfd9577fc6d98be98ea9829c4e95361029ee125457dae15f47eb01e7"} Jan 22 00:10:04 crc kubenswrapper[4829]: I0122 00:10:04.663460 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:10:04 crc kubenswrapper[4829]: I0122 00:10:04.663604 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:10:05 crc kubenswrapper[4829]: I0122 00:10:05.929179 4829 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-rknpp container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Jan 22 00:10:05 crc kubenswrapper[4829]: I0122 00:10:05.929578 4829 prober.go:107] "Probe failed" 
probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp" podUID="fc463c00-fc43-4cdc-8ff0-a5026aba9539" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" Jan 22 00:10:06 crc kubenswrapper[4829]: I0122 00:10:06.129497 4829 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-q9d5h container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Jan 22 00:10:06 crc kubenswrapper[4829]: I0122 00:10:06.130041 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" podUID="0e60009d-5985-47f9-b164-32cf604c23fa" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" Jan 22 00:10:06 crc kubenswrapper[4829]: I0122 00:10:06.135261 4829 patch_prober.go:28] interesting pod/downloads-7954f5f757-8tq29 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Jan 22 00:10:06 crc kubenswrapper[4829]: I0122 00:10:06.135320 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-8tq29" podUID="9447fd29-4eae-4299-b266-1f5236931aee" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Jan 22 00:10:06 crc kubenswrapper[4829]: I0122 00:10:06.135366 4829 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-8tq29" Jan 22 00:10:06 crc kubenswrapper[4829]: I0122 00:10:06.135851 4829 patch_prober.go:28] interesting pod/downloads-7954f5f757-8tq29 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Jan 22 00:10:06 crc kubenswrapper[4829]: I0122 00:10:06.135931 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-8tq29" podUID="9447fd29-4eae-4299-b266-1f5236931aee" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Jan 22 00:10:06 crc kubenswrapper[4829]: I0122 00:10:06.136247 4829 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"ce130acb05813e2d5eacfa87edff5955bab74a59d5a2339f9a7b9ffc776c70ec"} pod="openshift-console/downloads-7954f5f757-8tq29" containerMessage="Container download-server failed liveness probe, will be restarted" Jan 22 00:10:06 crc kubenswrapper[4829]: I0122 00:10:06.136339 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-8tq29" podUID="9447fd29-4eae-4299-b266-1f5236931aee" containerName="download-server" containerID="cri-o://ce130acb05813e2d5eacfa87edff5955bab74a59d5a2339f9a7b9ffc776c70ec" gracePeriod=2 Jan 22 00:10:06 crc kubenswrapper[4829]: I0122 00:10:06.136433 4829 patch_prober.go:28] interesting 
pod/downloads-7954f5f757-8tq29 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Jan 22 00:10:06 crc kubenswrapper[4829]: I0122 00:10:06.136498 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-8tq29" podUID="9447fd29-4eae-4299-b266-1f5236931aee" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Jan 22 00:10:06 crc kubenswrapper[4829]: I0122 00:10:06.383806 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-2fdtt" Jan 22 00:10:06 crc kubenswrapper[4829]: I0122 00:10:06.387385 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-2fdtt" Jan 22 00:10:07 crc kubenswrapper[4829]: I0122 00:10:07.525797 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:10:15 crc kubenswrapper[4829]: I0122 00:10:15.929194 4829 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-rknpp container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Jan 22 00:10:15 crc kubenswrapper[4829]: I0122 00:10:15.929642 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp" podUID="fc463c00-fc43-4cdc-8ff0-a5026aba9539" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" Jan 22 00:10:16 crc kubenswrapper[4829]: I0122 00:10:16.130077 4829 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-q9d5h container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Jan 22 00:10:16 crc kubenswrapper[4829]: I0122 00:10:16.130577 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" podUID="0e60009d-5985-47f9-b164-32cf604c23fa" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" Jan 22 00:10:16 crc kubenswrapper[4829]: I0122 00:10:16.137354 4829 patch_prober.go:28] interesting pod/downloads-7954f5f757-8tq29 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Jan 22 00:10:16 crc kubenswrapper[4829]: I0122 00:10:16.137428 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-8tq29" podUID="9447fd29-4eae-4299-b266-1f5236931aee" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Jan 22 00:10:17 crc kubenswrapper[4829]: I0122 00:10:17.395866 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vdnc9" Jan 22 00:10:17 crc kubenswrapper[4829]: I0122 00:10:17.889980 4829 generic.go:334] "Generic (PLEG): container finished" podID="9447fd29-4eae-4299-b266-1f5236931aee" containerID="ce130acb05813e2d5eacfa87edff5955bab74a59d5a2339f9a7b9ffc776c70ec" exitCode=0 Jan 22 00:10:17 crc kubenswrapper[4829]: I0122 00:10:17.890068 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-8tq29" event={"ID":"9447fd29-4eae-4299-b266-1f5236931aee","Type":"ContainerDied","Data":"ce130acb05813e2d5eacfa87edff5955bab74a59d5a2339f9a7b9ffc776c70ec"} Jan 22 00:10:17 crc kubenswrapper[4829]: I0122 00:10:17.893979 4829 generic.go:334] "Generic (PLEG): container finished" podID="fb1dda78-6284-441f-9239-1e9f81282032" containerID="4d4b531428b87c3f22ccf9cd2995e8a5e2929dab98f04e652c64f99bf0ad2a81" exitCode=0 Jan 22 00:10:17 crc kubenswrapper[4829]: I0122 00:10:17.894010 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29484000-7gpqp" event={"ID":"fb1dda78-6284-441f-9239-1e9f81282032","Type":"ContainerDied","Data":"4d4b531428b87c3f22ccf9cd2995e8a5e2929dab98f04e652c64f99bf0ad2a81"} Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.051253 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.052662 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp" Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.067430 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-pruner-29484000-7gpqp" Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.183455 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7n75\" (UniqueName: \"kubernetes.io/projected/fc463c00-fc43-4cdc-8ff0-a5026aba9539-kube-api-access-j7n75\") pod \"fc463c00-fc43-4cdc-8ff0-a5026aba9539\" (UID: \"fc463c00-fc43-4cdc-8ff0-a5026aba9539\") " Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.183525 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fc463c00-fc43-4cdc-8ff0-a5026aba9539-client-ca\") pod \"fc463c00-fc43-4cdc-8ff0-a5026aba9539\" (UID: \"fc463c00-fc43-4cdc-8ff0-a5026aba9539\") " Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.183634 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/fb1dda78-6284-441f-9239-1e9f81282032-serviceca\") pod \"fb1dda78-6284-441f-9239-1e9f81282032\" (UID: \"fb1dda78-6284-441f-9239-1e9f81282032\") " Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.183658 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4d010742-e9a1-4949-8cd3-e29f666694f5-kube-api-access\") pod \"4d010742-e9a1-4949-8cd3-e29f666694f5\" (UID: \"4d010742-e9a1-4949-8cd3-e29f666694f5\") " Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.183686 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fc463c00-fc43-4cdc-8ff0-a5026aba9539-serving-cert\") pod \"fc463c00-fc43-4cdc-8ff0-a5026aba9539\" (UID: \"fc463c00-fc43-4cdc-8ff0-a5026aba9539\") " Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.183724 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4d010742-e9a1-4949-8cd3-e29f666694f5-kubelet-dir\") pod \"4d010742-e9a1-4949-8cd3-e29f666694f5\" (UID: \"4d010742-e9a1-4949-8cd3-e29f666694f5\") " Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.183774 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xb4gv\" (UniqueName: \"kubernetes.io/projected/fb1dda78-6284-441f-9239-1e9f81282032-kube-api-access-xb4gv\") pod \"fb1dda78-6284-441f-9239-1e9f81282032\" (UID: \"fb1dda78-6284-441f-9239-1e9f81282032\") " Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.183811 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc463c00-fc43-4cdc-8ff0-a5026aba9539-config\") pod \"fc463c00-fc43-4cdc-8ff0-a5026aba9539\" (UID: \"fc463c00-fc43-4cdc-8ff0-a5026aba9539\") " Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.184408 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4d010742-e9a1-4949-8cd3-e29f666694f5-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "4d010742-e9a1-4949-8cd3-e29f666694f5" (UID: "4d010742-e9a1-4949-8cd3-e29f666694f5"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.184424 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc463c00-fc43-4cdc-8ff0-a5026aba9539-client-ca" (OuterVolumeSpecName: "client-ca") pod "fc463c00-fc43-4cdc-8ff0-a5026aba9539" (UID: "fc463c00-fc43-4cdc-8ff0-a5026aba9539"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.184551 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb1dda78-6284-441f-9239-1e9f81282032-serviceca" (OuterVolumeSpecName: "serviceca") pod "fb1dda78-6284-441f-9239-1e9f81282032" (UID: "fb1dda78-6284-441f-9239-1e9f81282032"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.185170 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc463c00-fc43-4cdc-8ff0-a5026aba9539-config" (OuterVolumeSpecName: "config") pod "fc463c00-fc43-4cdc-8ff0-a5026aba9539" (UID: "fc463c00-fc43-4cdc-8ff0-a5026aba9539"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.191108 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc463c00-fc43-4cdc-8ff0-a5026aba9539-kube-api-access-j7n75" (OuterVolumeSpecName: "kube-api-access-j7n75") pod "fc463c00-fc43-4cdc-8ff0-a5026aba9539" (UID: "fc463c00-fc43-4cdc-8ff0-a5026aba9539"). InnerVolumeSpecName "kube-api-access-j7n75". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.191170 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb1dda78-6284-441f-9239-1e9f81282032-kube-api-access-xb4gv" (OuterVolumeSpecName: "kube-api-access-xb4gv") pod "fb1dda78-6284-441f-9239-1e9f81282032" (UID: "fb1dda78-6284-441f-9239-1e9f81282032"). InnerVolumeSpecName "kube-api-access-xb4gv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.194340 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc463c00-fc43-4cdc-8ff0-a5026aba9539-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "fc463c00-fc43-4cdc-8ff0-a5026aba9539" (UID: "fc463c00-fc43-4cdc-8ff0-a5026aba9539"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.204668 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d010742-e9a1-4949-8cd3-e29f666694f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "4d010742-e9a1-4949-8cd3-e29f666694f5" (UID: "4d010742-e9a1-4949-8cd3-e29f666694f5"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.284849 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7n75\" (UniqueName: \"kubernetes.io/projected/fc463c00-fc43-4cdc-8ff0-a5026aba9539-kube-api-access-j7n75\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.284876 4829 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fc463c00-fc43-4cdc-8ff0-a5026aba9539-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.284887 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4d010742-e9a1-4949-8cd3-e29f666694f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.284895 4829 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/fb1dda78-6284-441f-9239-1e9f81282032-serviceca\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.284904 4829 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fc463c00-fc43-4cdc-8ff0-a5026aba9539-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.284912 4829 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4d010742-e9a1-4949-8cd3-e29f666694f5-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.284919 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xb4gv\" (UniqueName: \"kubernetes.io/projected/fb1dda78-6284-441f-9239-1e9f81282032-kube-api-access-xb4gv\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.284927 4829 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc463c00-fc43-4cdc-8ff0-a5026aba9539-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.952184 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp" event={"ID":"fc463c00-fc43-4cdc-8ff0-a5026aba9539","Type":"ContainerDied","Data":"2036e912958295c80841153ed3110802fe4724a1c236a4608ddcd33076fa18d9"} Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.952472 4829 scope.go:117] "RemoveContainer" containerID="c245a3b6afbfec49810a2682a51541ec1be31695ca4b2a88ea188c398236ae80" Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.952265 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp" Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.953365 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"4d010742-e9a1-4949-8cd3-e29f666694f5","Type":"ContainerDied","Data":"f422fb4121ef381b93c0df154fda175c97557f663567bca85df1494b9ee13523"} Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.953449 4829 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f422fb4121ef381b93c0df154fda175c97557f663567bca85df1494b9ee13523" Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.953953 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.955817 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29484000-7gpqp" event={"ID":"fb1dda78-6284-441f-9239-1e9f81282032","Type":"ContainerDied","Data":"ab0cccce306b7d79aa9c7bc3cb35a31126bf0b62670017f832b941e6130ed35d"} Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.955855 4829 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ab0cccce306b7d79aa9c7bc3cb35a31126bf0b62670017f832b941e6130ed35d" Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.955907 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-pruner-29484000-7gpqp" Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.993988 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp"] Jan 22 00:10:25 crc kubenswrapper[4829]: I0122 00:10:25.997265 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-rknpp"] Jan 22 00:10:26 crc kubenswrapper[4829]: I0122 00:10:26.138673 4829 patch_prober.go:28] interesting pod/downloads-7954f5f757-8tq29 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Jan 22 00:10:26 crc kubenswrapper[4829]: I0122 00:10:26.138821 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-8tq29" podUID="9447fd29-4eae-4299-b266-1f5236931aee" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Jan 22 00:10:26 crc kubenswrapper[4829]: I0122 00:10:26.562122 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc463c00-fc43-4cdc-8ff0-a5026aba9539" path="/var/lib/kubelet/pods/fc463c00-fc43-4cdc-8ff0-a5026aba9539/volumes" Jan 22 00:10:27 crc kubenswrapper[4829]: I0122 00:10:27.129664 4829 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-q9d5h container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 22 00:10:27 crc kubenswrapper[4829]: I0122 00:10:27.129763 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" 
podUID="0e60009d-5985-47f9-b164-32cf604c23fa" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 22 00:10:27 crc kubenswrapper[4829]: E0122 00:10:27.633868 4829 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 22 00:10:27 crc kubenswrapper[4829]: E0122 00:10:27.634271 4829 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hdh8w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-j66zg_openshift-marketplace(984817e5-ea2b-4a58-a082-0e83447b116a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 00:10:27 crc kubenswrapper[4829]: E0122 00:10:27.635701 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-j66zg" podUID="984817e5-ea2b-4a58-a082-0e83447b116a" Jan 22 00:10:27 crc kubenswrapper[4829]: I0122 00:10:27.836521 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 22 00:10:27 crc kubenswrapper[4829]: E0122 00:10:27.836860 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d010742-e9a1-4949-8cd3-e29f666694f5" containerName="pruner" Jan 22 00:10:27 crc kubenswrapper[4829]: I0122 00:10:27.836873 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d010742-e9a1-4949-8cd3-e29f666694f5" containerName="pruner" Jan 22 00:10:27 crc kubenswrapper[4829]: E0122 00:10:27.836889 4829 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="fc463c00-fc43-4cdc-8ff0-a5026aba9539" containerName="route-controller-manager" Jan 22 00:10:27 crc kubenswrapper[4829]: I0122 00:10:27.836895 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc463c00-fc43-4cdc-8ff0-a5026aba9539" containerName="route-controller-manager" Jan 22 00:10:27 crc kubenswrapper[4829]: E0122 00:10:27.836905 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb1dda78-6284-441f-9239-1e9f81282032" containerName="image-pruner" Jan 22 00:10:27 crc kubenswrapper[4829]: I0122 00:10:27.836911 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb1dda78-6284-441f-9239-1e9f81282032" containerName="image-pruner" Jan 22 00:10:27 crc kubenswrapper[4829]: I0122 00:10:27.837000 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb1dda78-6284-441f-9239-1e9f81282032" containerName="image-pruner" Jan 22 00:10:27 crc kubenswrapper[4829]: I0122 00:10:27.837011 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d010742-e9a1-4949-8cd3-e29f666694f5" containerName="pruner" Jan 22 00:10:27 crc kubenswrapper[4829]: I0122 00:10:27.837020 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc463c00-fc43-4cdc-8ff0-a5026aba9539" containerName="route-controller-manager" Jan 22 00:10:27 crc kubenswrapper[4829]: I0122 00:10:27.837403 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 00:10:27 crc kubenswrapper[4829]: I0122 00:10:27.843711 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 22 00:10:27 crc kubenswrapper[4829]: I0122 00:10:27.844241 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 22 00:10:27 crc kubenswrapper[4829]: I0122 00:10:27.844259 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 22 00:10:27 crc kubenswrapper[4829]: I0122 00:10:27.983957 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb"] Jan 22 00:10:27 crc kubenswrapper[4829]: I0122 00:10:27.985858 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb" Jan 22 00:10:27 crc kubenswrapper[4829]: I0122 00:10:27.988790 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 22 00:10:27 crc kubenswrapper[4829]: I0122 00:10:27.989627 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 22 00:10:27 crc kubenswrapper[4829]: I0122 00:10:27.989656 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 22 00:10:27 crc kubenswrapper[4829]: I0122 00:10:27.990300 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 22 00:10:27 crc kubenswrapper[4829]: I0122 00:10:27.996175 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 22 00:10:27 crc kubenswrapper[4829]: I0122 00:10:27.998659 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 22 00:10:28 crc kubenswrapper[4829]: I0122 00:10:28.006333 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb"] Jan 22 00:10:28 crc kubenswrapper[4829]: I0122 00:10:28.019571 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f0849650-9651-4197-a80e-5b33bf0abef0-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"f0849650-9651-4197-a80e-5b33bf0abef0\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 00:10:28 crc kubenswrapper[4829]: I0122 00:10:28.019684 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f0849650-9651-4197-a80e-5b33bf0abef0-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"f0849650-9651-4197-a80e-5b33bf0abef0\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 00:10:28 crc kubenswrapper[4829]: I0122 00:10:28.121742 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/475456c1-0b50-49ca-afcd-0ea6560943fa-serving-cert\") pod \"route-controller-manager-64cbc6c54-nssbb\" (UID: \"475456c1-0b50-49ca-afcd-0ea6560943fa\") " pod="openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb" Jan 22 00:10:28 crc kubenswrapper[4829]: I0122 00:10:28.121961 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f0849650-9651-4197-a80e-5b33bf0abef0-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"f0849650-9651-4197-a80e-5b33bf0abef0\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 00:10:28 crc kubenswrapper[4829]: I0122 00:10:28.122049 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/475456c1-0b50-49ca-afcd-0ea6560943fa-client-ca\") pod \"route-controller-manager-64cbc6c54-nssbb\" (UID: \"475456c1-0b50-49ca-afcd-0ea6560943fa\") " pod="openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb" 
Jan 22 00:10:28 crc kubenswrapper[4829]: I0122 00:10:28.122177 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mp2x6\" (UniqueName: \"kubernetes.io/projected/475456c1-0b50-49ca-afcd-0ea6560943fa-kube-api-access-mp2x6\") pod \"route-controller-manager-64cbc6c54-nssbb\" (UID: \"475456c1-0b50-49ca-afcd-0ea6560943fa\") " pod="openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb" Jan 22 00:10:28 crc kubenswrapper[4829]: I0122 00:10:28.122515 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f0849650-9651-4197-a80e-5b33bf0abef0-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"f0849650-9651-4197-a80e-5b33bf0abef0\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 00:10:28 crc kubenswrapper[4829]: I0122 00:10:28.122612 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/475456c1-0b50-49ca-afcd-0ea6560943fa-config\") pod \"route-controller-manager-64cbc6c54-nssbb\" (UID: \"475456c1-0b50-49ca-afcd-0ea6560943fa\") " pod="openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb" Jan 22 00:10:28 crc kubenswrapper[4829]: I0122 00:10:28.122650 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f0849650-9651-4197-a80e-5b33bf0abef0-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"f0849650-9651-4197-a80e-5b33bf0abef0\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 00:10:28 crc kubenswrapper[4829]: I0122 00:10:28.144095 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f0849650-9651-4197-a80e-5b33bf0abef0-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"f0849650-9651-4197-a80e-5b33bf0abef0\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 00:10:28 crc kubenswrapper[4829]: I0122 00:10:28.163619 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 00:10:28 crc kubenswrapper[4829]: I0122 00:10:28.224356 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/475456c1-0b50-49ca-afcd-0ea6560943fa-config\") pod \"route-controller-manager-64cbc6c54-nssbb\" (UID: \"475456c1-0b50-49ca-afcd-0ea6560943fa\") " pod="openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb" Jan 22 00:10:28 crc kubenswrapper[4829]: I0122 00:10:28.224769 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/475456c1-0b50-49ca-afcd-0ea6560943fa-serving-cert\") pod \"route-controller-manager-64cbc6c54-nssbb\" (UID: \"475456c1-0b50-49ca-afcd-0ea6560943fa\") " pod="openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb" Jan 22 00:10:28 crc kubenswrapper[4829]: I0122 00:10:28.224892 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/475456c1-0b50-49ca-afcd-0ea6560943fa-client-ca\") pod \"route-controller-manager-64cbc6c54-nssbb\" (UID: \"475456c1-0b50-49ca-afcd-0ea6560943fa\") " pod="openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb" Jan 22 00:10:28 crc kubenswrapper[4829]: I0122 00:10:28.224999 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mp2x6\" (UniqueName: \"kubernetes.io/projected/475456c1-0b50-49ca-afcd-0ea6560943fa-kube-api-access-mp2x6\") pod \"route-controller-manager-64cbc6c54-nssbb\" (UID: \"475456c1-0b50-49ca-afcd-0ea6560943fa\") " pod="openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb" Jan 22 00:10:28 crc kubenswrapper[4829]: I0122 00:10:28.225858 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/475456c1-0b50-49ca-afcd-0ea6560943fa-config\") pod \"route-controller-manager-64cbc6c54-nssbb\" (UID: \"475456c1-0b50-49ca-afcd-0ea6560943fa\") " pod="openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb" Jan 22 00:10:28 crc kubenswrapper[4829]: I0122 00:10:28.226589 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/475456c1-0b50-49ca-afcd-0ea6560943fa-client-ca\") pod \"route-controller-manager-64cbc6c54-nssbb\" (UID: \"475456c1-0b50-49ca-afcd-0ea6560943fa\") " pod="openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb" Jan 22 00:10:28 crc kubenswrapper[4829]: I0122 00:10:28.229566 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/475456c1-0b50-49ca-afcd-0ea6560943fa-serving-cert\") pod \"route-controller-manager-64cbc6c54-nssbb\" (UID: \"475456c1-0b50-49ca-afcd-0ea6560943fa\") " pod="openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb" Jan 22 00:10:28 crc kubenswrapper[4829]: I0122 00:10:28.241314 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mp2x6\" (UniqueName: \"kubernetes.io/projected/475456c1-0b50-49ca-afcd-0ea6560943fa-kube-api-access-mp2x6\") pod \"route-controller-manager-64cbc6c54-nssbb\" (UID: \"475456c1-0b50-49ca-afcd-0ea6560943fa\") " pod="openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb" Jan 22 00:10:28 crc kubenswrapper[4829]: 
I0122 00:10:28.314903 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb" Jan 22 00:10:32 crc kubenswrapper[4829]: I0122 00:10:32.037912 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 22 00:10:32 crc kubenswrapper[4829]: I0122 00:10:32.040303 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 22 00:10:32 crc kubenswrapper[4829]: I0122 00:10:32.045589 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 22 00:10:32 crc kubenswrapper[4829]: I0122 00:10:32.177521 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/b6d802df-3827-44ed-96e9-d5013b03aa73-var-lock\") pod \"installer-9-crc\" (UID: \"b6d802df-3827-44ed-96e9-d5013b03aa73\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 00:10:32 crc kubenswrapper[4829]: I0122 00:10:32.177613 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b6d802df-3827-44ed-96e9-d5013b03aa73-kubelet-dir\") pod \"installer-9-crc\" (UID: \"b6d802df-3827-44ed-96e9-d5013b03aa73\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 00:10:32 crc kubenswrapper[4829]: I0122 00:10:32.177638 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b6d802df-3827-44ed-96e9-d5013b03aa73-kube-api-access\") pod \"installer-9-crc\" (UID: \"b6d802df-3827-44ed-96e9-d5013b03aa73\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 00:10:32 crc kubenswrapper[4829]: I0122 00:10:32.279496 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b6d802df-3827-44ed-96e9-d5013b03aa73-kube-api-access\") pod \"installer-9-crc\" (UID: \"b6d802df-3827-44ed-96e9-d5013b03aa73\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 00:10:32 crc kubenswrapper[4829]: I0122 00:10:32.279625 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/b6d802df-3827-44ed-96e9-d5013b03aa73-var-lock\") pod \"installer-9-crc\" (UID: \"b6d802df-3827-44ed-96e9-d5013b03aa73\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 00:10:32 crc kubenswrapper[4829]: I0122 00:10:32.279667 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b6d802df-3827-44ed-96e9-d5013b03aa73-kubelet-dir\") pod \"installer-9-crc\" (UID: \"b6d802df-3827-44ed-96e9-d5013b03aa73\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 00:10:32 crc kubenswrapper[4829]: I0122 00:10:32.279747 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b6d802df-3827-44ed-96e9-d5013b03aa73-kubelet-dir\") pod \"installer-9-crc\" (UID: \"b6d802df-3827-44ed-96e9-d5013b03aa73\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 00:10:32 crc kubenswrapper[4829]: I0122 00:10:32.279789 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: 
\"kubernetes.io/host-path/b6d802df-3827-44ed-96e9-d5013b03aa73-var-lock\") pod \"installer-9-crc\" (UID: \"b6d802df-3827-44ed-96e9-d5013b03aa73\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 00:10:32 crc kubenswrapper[4829]: I0122 00:10:32.297506 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b6d802df-3827-44ed-96e9-d5013b03aa73-kube-api-access\") pod \"installer-9-crc\" (UID: \"b6d802df-3827-44ed-96e9-d5013b03aa73\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 00:10:32 crc kubenswrapper[4829]: I0122 00:10:32.365150 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 22 00:10:33 crc kubenswrapper[4829]: E0122 00:10:33.202986 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-j66zg" podUID="984817e5-ea2b-4a58-a082-0e83447b116a" Jan 22 00:10:33 crc kubenswrapper[4829]: I0122 00:10:33.246527 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-26xkj"] Jan 22 00:10:33 crc kubenswrapper[4829]: E0122 00:10:33.300220 4829 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 22 00:10:33 crc kubenswrapper[4829]: E0122 00:10:33.300383 4829 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v6llc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-6pznq_openshift-marketplace(08ce8375-3a53-43e6-a7c1-3ce32698965d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 00:10:33 
crc kubenswrapper[4829]: E0122 00:10:33.301701 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-6pznq" podUID="08ce8375-3a53-43e6-a7c1-3ce32698965d" Jan 22 00:10:34 crc kubenswrapper[4829]: I0122 00:10:34.658249 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:10:34 crc kubenswrapper[4829]: I0122 00:10:34.658317 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:10:34 crc kubenswrapper[4829]: I0122 00:10:34.658363 4829 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 00:10:34 crc kubenswrapper[4829]: I0122 00:10:34.659204 4829 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e"} pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 00:10:34 crc kubenswrapper[4829]: I0122 00:10:34.659253 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" containerID="cri-o://94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e" gracePeriod=600 Jan 22 00:10:35 crc kubenswrapper[4829]: I0122 00:10:35.022637 4829 generic.go:334] "Generic (PLEG): container finished" podID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerID="94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e" exitCode=0 Jan 22 00:10:35 crc kubenswrapper[4829]: I0122 00:10:35.022782 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerDied","Data":"94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e"} Jan 22 00:10:35 crc kubenswrapper[4829]: E0122 00:10:35.267408 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-6pznq" podUID="08ce8375-3a53-43e6-a7c1-3ce32698965d" Jan 22 00:10:35 crc kubenswrapper[4829]: E0122 00:10:35.437356 4829 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 22 00:10:35 crc kubenswrapper[4829]: E0122 00:10:35.437533 4829 kuberuntime_manager.go:1274] "Unhandled 
Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xq6nc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-tw5xj_openshift-marketplace(622d8532-4cca-4f15-972e-373735e2a5c1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 00:10:35 crc kubenswrapper[4829]: E0122 00:10:35.438808 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-tw5xj" podUID="622d8532-4cca-4f15-972e-373735e2a5c1" Jan 22 00:10:35 crc kubenswrapper[4829]: E0122 00:10:35.461482 4829 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 22 00:10:35 crc kubenswrapper[4829]: E0122 00:10:35.461666 4829 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dm4pv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-hp7g2_openshift-marketplace(4802cff4-7d5d-4af4-9ae0-7816748f46b3): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 00:10:35 crc kubenswrapper[4829]: E0122 00:10:35.462887 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-hp7g2" podUID="4802cff4-7d5d-4af4-9ae0-7816748f46b3" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.136844 4829 patch_prober.go:28] interesting pod/downloads-7954f5f757-8tq29 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.136901 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-8tq29" podUID="9447fd29-4eae-4299-b266-1f5236931aee" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Jan 22 00:10:36 crc kubenswrapper[4829]: E0122 00:10:36.544658 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-hp7g2" podUID="4802cff4-7d5d-4af4-9ae0-7816748f46b3" Jan 22 00:10:36 crc kubenswrapper[4829]: E0122 00:10:36.545226 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-tw5xj" podUID="622d8532-4cca-4f15-972e-373735e2a5c1" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.593567 4829 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.625757 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw"] Jan 22 00:10:36 crc kubenswrapper[4829]: E0122 00:10:36.626663 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e60009d-5985-47f9-b164-32cf604c23fa" containerName="controller-manager" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.626690 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e60009d-5985-47f9-b164-32cf604c23fa" containerName="controller-manager" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.626837 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e60009d-5985-47f9-b164-32cf604c23fa" containerName="controller-manager" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.627471 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.640076 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw"] Jan 22 00:10:36 crc kubenswrapper[4829]: E0122 00:10:36.662990 4829 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 22 00:10:36 crc kubenswrapper[4829]: E0122 00:10:36.663199 4829 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fhrsr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-bnqwm_openshift-marketplace(76530c0d-9597-4099-b9c3-f375bd12b26c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 00:10:36 
crc kubenswrapper[4829]: E0122 00:10:36.664872 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-bnqwm" podUID="76530c0d-9597-4099-b9c3-f375bd12b26c" Jan 22 00:10:36 crc kubenswrapper[4829]: E0122 00:10:36.665025 4829 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 22 00:10:36 crc kubenswrapper[4829]: E0122 00:10:36.665239 4829 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vmvx2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-tnwgc_openshift-marketplace(61af08cf-4127-439f-abd2-35fb5ad7dd2f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 00:10:36 crc kubenswrapper[4829]: E0122 00:10:36.666491 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-tnwgc" podUID="61af08cf-4127-439f-abd2-35fb5ad7dd2f" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.676813 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0e60009d-5985-47f9-b164-32cf604c23fa-client-ca\") pod \"0e60009d-5985-47f9-b164-32cf604c23fa\" (UID: \"0e60009d-5985-47f9-b164-32cf604c23fa\") " Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.676996 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/0e60009d-5985-47f9-b164-32cf604c23fa-config\") pod \"0e60009d-5985-47f9-b164-32cf604c23fa\" (UID: \"0e60009d-5985-47f9-b164-32cf604c23fa\") " Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.678234 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e60009d-5985-47f9-b164-32cf604c23fa-client-ca" (OuterVolumeSpecName: "client-ca") pod "0e60009d-5985-47f9-b164-32cf604c23fa" (UID: "0e60009d-5985-47f9-b164-32cf604c23fa"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.678613 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e60009d-5985-47f9-b164-32cf604c23fa-config" (OuterVolumeSpecName: "config") pod "0e60009d-5985-47f9-b164-32cf604c23fa" (UID: "0e60009d-5985-47f9-b164-32cf604c23fa"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.678675 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e60009d-5985-47f9-b164-32cf604c23fa-serving-cert\") pod \"0e60009d-5985-47f9-b164-32cf604c23fa\" (UID: \"0e60009d-5985-47f9-b164-32cf604c23fa\") " Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.678796 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0e60009d-5985-47f9-b164-32cf604c23fa-proxy-ca-bundles\") pod \"0e60009d-5985-47f9-b164-32cf604c23fa\" (UID: \"0e60009d-5985-47f9-b164-32cf604c23fa\") " Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.679391 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e60009d-5985-47f9-b164-32cf604c23fa-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "0e60009d-5985-47f9-b164-32cf604c23fa" (UID: "0e60009d-5985-47f9-b164-32cf604c23fa"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.679497 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-njjvv\" (UniqueName: \"kubernetes.io/projected/0e60009d-5985-47f9-b164-32cf604c23fa-kube-api-access-njjvv\") pod \"0e60009d-5985-47f9-b164-32cf604c23fa\" (UID: \"0e60009d-5985-47f9-b164-32cf604c23fa\") " Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.680226 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-serving-cert\") pod \"controller-manager-84cc8f9b99-7f9tw\" (UID: \"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420\") " pod="openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.680284 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-proxy-ca-bundles\") pod \"controller-manager-84cc8f9b99-7f9tw\" (UID: \"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420\") " pod="openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.680326 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-config\") pod \"controller-manager-84cc8f9b99-7f9tw\" (UID: \"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420\") " pod="openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.680476 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-client-ca\") pod \"controller-manager-84cc8f9b99-7f9tw\" (UID: \"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420\") " pod="openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.680583 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f428p\" (UniqueName: \"kubernetes.io/projected/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-kube-api-access-f428p\") pod \"controller-manager-84cc8f9b99-7f9tw\" (UID: \"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420\") " pod="openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.680773 4829 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0e60009d-5985-47f9-b164-32cf604c23fa-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.680802 4829 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e60009d-5985-47f9-b164-32cf604c23fa-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.680818 4829 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0e60009d-5985-47f9-b164-32cf604c23fa-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.684669 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/0e60009d-5985-47f9-b164-32cf604c23fa-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0e60009d-5985-47f9-b164-32cf604c23fa" (UID: "0e60009d-5985-47f9-b164-32cf604c23fa"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.694634 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e60009d-5985-47f9-b164-32cf604c23fa-kube-api-access-njjvv" (OuterVolumeSpecName: "kube-api-access-njjvv") pod "0e60009d-5985-47f9-b164-32cf604c23fa" (UID: "0e60009d-5985-47f9-b164-32cf604c23fa"). InnerVolumeSpecName "kube-api-access-njjvv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:10:36 crc kubenswrapper[4829]: E0122 00:10:36.697677 4829 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 22 00:10:36 crc kubenswrapper[4829]: E0122 00:10:36.697858 4829 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jmvjp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-njstk_openshift-marketplace(df877a86-a205-463a-a771-1dee5d9750f8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 00:10:36 crc kubenswrapper[4829]: E0122 00:10:36.704731 4829 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 22 00:10:36 crc kubenswrapper[4829]: E0122 00:10:36.704940 4829 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs 
--catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8jqvm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-z7h8p_openshift-marketplace(12c22bf1-09d4-4a41-b977-87595cc90757): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 00:10:36 crc kubenswrapper[4829]: E0122 00:10:36.705066 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-njstk" podUID="df877a86-a205-463a-a771-1dee5d9750f8" Jan 22 00:10:36 crc kubenswrapper[4829]: E0122 00:10:36.706469 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-z7h8p" podUID="12c22bf1-09d4-4a41-b977-87595cc90757" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.784063 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-serving-cert\") pod \"controller-manager-84cc8f9b99-7f9tw\" (UID: \"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420\") " pod="openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.784697 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-proxy-ca-bundles\") pod \"controller-manager-84cc8f9b99-7f9tw\" (UID: \"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420\") " pod="openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.784773 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-config\") pod 
\"controller-manager-84cc8f9b99-7f9tw\" (UID: \"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420\") " pod="openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.785913 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-proxy-ca-bundles\") pod \"controller-manager-84cc8f9b99-7f9tw\" (UID: \"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420\") " pod="openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.786557 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-config\") pod \"controller-manager-84cc8f9b99-7f9tw\" (UID: \"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420\") " pod="openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.786676 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-client-ca\") pod \"controller-manager-84cc8f9b99-7f9tw\" (UID: \"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420\") " pod="openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.786774 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f428p\" (UniqueName: \"kubernetes.io/projected/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-kube-api-access-f428p\") pod \"controller-manager-84cc8f9b99-7f9tw\" (UID: \"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420\") " pod="openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.786978 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-njjvv\" (UniqueName: \"kubernetes.io/projected/0e60009d-5985-47f9-b164-32cf604c23fa-kube-api-access-njjvv\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.786993 4829 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e60009d-5985-47f9-b164-32cf604c23fa-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.791037 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-client-ca\") pod \"controller-manager-84cc8f9b99-7f9tw\" (UID: \"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420\") " pod="openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.794285 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-serving-cert\") pod \"controller-manager-84cc8f9b99-7f9tw\" (UID: \"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420\") " pod="openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.819422 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f428p\" (UniqueName: \"kubernetes.io/projected/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-kube-api-access-f428p\") pod \"controller-manager-84cc8f9b99-7f9tw\" (UID: \"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420\") " 
pod="openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw" Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.889282 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb"] Jan 22 00:10:36 crc kubenswrapper[4829]: W0122 00:10:36.896594 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod475456c1_0b50_49ca_afcd_0ea6560943fa.slice/crio-462e175e703a1cf18e475408eef65a4660608c10607cc419890f4f1cad9dc422 WatchSource:0}: Error finding container 462e175e703a1cf18e475408eef65a4660608c10607cc419890f4f1cad9dc422: Status 404 returned error can't find the container with id 462e175e703a1cf18e475408eef65a4660608c10607cc419890f4f1cad9dc422 Jan 22 00:10:36 crc kubenswrapper[4829]: I0122 00:10:36.989841 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw" Jan 22 00:10:37 crc kubenswrapper[4829]: I0122 00:10:37.034618 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-8tq29" event={"ID":"9447fd29-4eae-4299-b266-1f5236931aee","Type":"ContainerStarted","Data":"e5c22eb0a56984903d48f8570e480193d4151e1fe206c3854350ab5a71199fdc"} Jan 22 00:10:37 crc kubenswrapper[4829]: I0122 00:10:37.035842 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-8tq29" Jan 22 00:10:37 crc kubenswrapper[4829]: I0122 00:10:37.035905 4829 patch_prober.go:28] interesting pod/downloads-7954f5f757-8tq29 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Jan 22 00:10:37 crc kubenswrapper[4829]: I0122 00:10:37.035930 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-8tq29" podUID="9447fd29-4eae-4299-b266-1f5236931aee" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Jan 22 00:10:37 crc kubenswrapper[4829]: I0122 00:10:37.038349 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" Jan 22 00:10:37 crc kubenswrapper[4829]: I0122 00:10:37.039651 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" event={"ID":"0e60009d-5985-47f9-b164-32cf604c23fa","Type":"ContainerDied","Data":"f2c12fd83874c399e632dcaaf4d173fae6ad65735bf179df4ce71b632389b1dc"} Jan 22 00:10:37 crc kubenswrapper[4829]: I0122 00:10:37.039696 4829 scope.go:117] "RemoveContainer" containerID="c1412b09bfd9577fc6d98be98ea9829c4e95361029ee125457dae15f47eb01e7" Jan 22 00:10:37 crc kubenswrapper[4829]: I0122 00:10:37.047927 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb" event={"ID":"475456c1-0b50-49ca-afcd-0ea6560943fa","Type":"ContainerStarted","Data":"e70fa4ff0a472e710337d4c5d0b5a01e9cdeb212c79b7aff71c87b577348a88d"} Jan 22 00:10:37 crc kubenswrapper[4829]: I0122 00:10:37.047967 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb" event={"ID":"475456c1-0b50-49ca-afcd-0ea6560943fa","Type":"ContainerStarted","Data":"462e175e703a1cf18e475408eef65a4660608c10607cc419890f4f1cad9dc422"} Jan 22 00:10:37 crc kubenswrapper[4829]: I0122 00:10:37.048828 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb" Jan 22 00:10:37 crc kubenswrapper[4829]: I0122 00:10:37.054803 4829 patch_prober.go:28] interesting pod/route-controller-manager-64cbc6c54-nssbb container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.56:8443/healthz\": dial tcp 10.217.0.56:8443: connect: connection refused" start-of-body= Jan 22 00:10:37 crc kubenswrapper[4829]: I0122 00:10:37.054863 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb" podUID="475456c1-0b50-49ca-afcd-0ea6560943fa" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.56:8443/healthz\": dial tcp 10.217.0.56:8443: connect: connection refused" Jan 22 00:10:37 crc kubenswrapper[4829]: I0122 00:10:37.058086 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerStarted","Data":"7d8aed39761e236f4d6bdcace20b41c7a53f6e11cebd6db8c3d28637f0ea9c9c"} Jan 22 00:10:37 crc kubenswrapper[4829]: E0122 00:10:37.064016 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-z7h8p" podUID="12c22bf1-09d4-4a41-b977-87595cc90757" Jan 22 00:10:37 crc kubenswrapper[4829]: E0122 00:10:37.064035 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-tnwgc" podUID="61af08cf-4127-439f-abd2-35fb5ad7dd2f" Jan 22 00:10:37 crc kubenswrapper[4829]: E0122 00:10:37.064039 4829 pod_workers.go:1301] "Error syncing 
pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-njstk" podUID="df877a86-a205-463a-a771-1dee5d9750f8" Jan 22 00:10:37 crc kubenswrapper[4829]: I0122 00:10:37.129557 4829 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-q9d5h container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 22 00:10:37 crc kubenswrapper[4829]: I0122 00:10:37.129632 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-q9d5h" podUID="0e60009d-5985-47f9-b164-32cf604c23fa" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 22 00:10:37 crc kubenswrapper[4829]: I0122 00:10:37.177387 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-q9d5h"] Jan 22 00:10:37 crc kubenswrapper[4829]: I0122 00:10:37.183335 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-q9d5h"] Jan 22 00:10:37 crc kubenswrapper[4829]: I0122 00:10:37.195753 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb" podStartSLOduration=16.195730778 podStartE2EDuration="16.195730778s" podCreationTimestamp="2026-01-22 00:10:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:10:37.193910733 +0000 UTC m=+215.230152665" watchObservedRunningTime="2026-01-22 00:10:37.195730778 +0000 UTC m=+215.231972690" Jan 22 00:10:37 crc kubenswrapper[4829]: I0122 00:10:37.241287 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 22 00:10:37 crc kubenswrapper[4829]: I0122 00:10:37.246174 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 22 00:10:37 crc kubenswrapper[4829]: I0122 00:10:37.585689 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw"] Jan 22 00:10:37 crc kubenswrapper[4829]: W0122 00:10:37.589810 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb68d2ea2_e2f5_4427_b6e6_a76dcf5c1420.slice/crio-a9841c8bb8d033e59b8c0810e3e1a3148c4b5c026c3878c1a4653f7b57e70c63 WatchSource:0}: Error finding container a9841c8bb8d033e59b8c0810e3e1a3148c4b5c026c3878c1a4653f7b57e70c63: Status 404 returned error can't find the container with id a9841c8bb8d033e59b8c0810e3e1a3148c4b5c026c3878c1a4653f7b57e70c63 Jan 22 00:10:38 crc kubenswrapper[4829]: I0122 00:10:38.063732 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"f0849650-9651-4197-a80e-5b33bf0abef0","Type":"ContainerStarted","Data":"bc0cb152721d4c42c249d0999f9b47404933bd99cc9a58b1d47da61b81611952"} Jan 22 00:10:38 crc 
kubenswrapper[4829]: I0122 00:10:38.064035 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"f0849650-9651-4197-a80e-5b33bf0abef0","Type":"ContainerStarted","Data":"96f588c86c569ca0258f097097e6c0a30d541d59403187f3c74c8606c2e9ccd9"} Jan 22 00:10:38 crc kubenswrapper[4829]: I0122 00:10:38.067145 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw" event={"ID":"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420","Type":"ContainerStarted","Data":"a9e0ad860caec89237c5ba047ee498461588643477da7705f6991553c0525017"} Jan 22 00:10:38 crc kubenswrapper[4829]: I0122 00:10:38.067188 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw" event={"ID":"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420","Type":"ContainerStarted","Data":"a9841c8bb8d033e59b8c0810e3e1a3148c4b5c026c3878c1a4653f7b57e70c63"} Jan 22 00:10:38 crc kubenswrapper[4829]: I0122 00:10:38.067525 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw" Jan 22 00:10:38 crc kubenswrapper[4829]: I0122 00:10:38.070034 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"b6d802df-3827-44ed-96e9-d5013b03aa73","Type":"ContainerStarted","Data":"968744718eb7ac9bce09af56d4684ee56bdca4bb5627eb76b1b9166518d045a5"} Jan 22 00:10:38 crc kubenswrapper[4829]: I0122 00:10:38.070065 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"b6d802df-3827-44ed-96e9-d5013b03aa73","Type":"ContainerStarted","Data":"9f3fb7e8130315f079129fbb277f1b202ee58cbe2e302454ad50de359edeba20"} Jan 22 00:10:38 crc kubenswrapper[4829]: I0122 00:10:38.071209 4829 patch_prober.go:28] interesting pod/downloads-7954f5f757-8tq29 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Jan 22 00:10:38 crc kubenswrapper[4829]: I0122 00:10:38.071246 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-8tq29" podUID="9447fd29-4eae-4299-b266-1f5236931aee" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Jan 22 00:10:38 crc kubenswrapper[4829]: I0122 00:10:38.077357 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb" Jan 22 00:10:38 crc kubenswrapper[4829]: I0122 00:10:38.086666 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=11.086646378 podStartE2EDuration="11.086646378s" podCreationTimestamp="2026-01-22 00:10:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:10:38.078515667 +0000 UTC m=+216.114757579" watchObservedRunningTime="2026-01-22 00:10:38.086646378 +0000 UTC m=+216.122888300" Jan 22 00:10:38 crc kubenswrapper[4829]: I0122 00:10:38.091083 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw" Jan 22 00:10:38 crc 
kubenswrapper[4829]: I0122 00:10:38.101045 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw" podStartSLOduration=17.101022164 podStartE2EDuration="17.101022164s" podCreationTimestamp="2026-01-22 00:10:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:10:38.096964258 +0000 UTC m=+216.133206170" watchObservedRunningTime="2026-01-22 00:10:38.101022164 +0000 UTC m=+216.137264076" Jan 22 00:10:38 crc kubenswrapper[4829]: I0122 00:10:38.129096 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=6.129077789 podStartE2EDuration="6.129077789s" podCreationTimestamp="2026-01-22 00:10:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:10:38.126370322 +0000 UTC m=+216.162612234" watchObservedRunningTime="2026-01-22 00:10:38.129077789 +0000 UTC m=+216.165319711" Jan 22 00:10:38 crc kubenswrapper[4829]: I0122 00:10:38.573425 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e60009d-5985-47f9-b164-32cf604c23fa" path="/var/lib/kubelet/pods/0e60009d-5985-47f9-b164-32cf604c23fa/volumes" Jan 22 00:10:39 crc kubenswrapper[4829]: I0122 00:10:39.076351 4829 generic.go:334] "Generic (PLEG): container finished" podID="f0849650-9651-4197-a80e-5b33bf0abef0" containerID="bc0cb152721d4c42c249d0999f9b47404933bd99cc9a58b1d47da61b81611952" exitCode=0 Jan 22 00:10:39 crc kubenswrapper[4829]: I0122 00:10:39.076401 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"f0849650-9651-4197-a80e-5b33bf0abef0","Type":"ContainerDied","Data":"bc0cb152721d4c42c249d0999f9b47404933bd99cc9a58b1d47da61b81611952"} Jan 22 00:10:39 crc kubenswrapper[4829]: I0122 00:10:39.077659 4829 patch_prober.go:28] interesting pod/downloads-7954f5f757-8tq29 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Jan 22 00:10:39 crc kubenswrapper[4829]: I0122 00:10:39.077693 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-8tq29" podUID="9447fd29-4eae-4299-b266-1f5236931aee" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.17:8080/\": dial tcp 10.217.0.17:8080: connect: connection refused" Jan 22 00:10:40 crc kubenswrapper[4829]: I0122 00:10:40.388621 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 00:10:40 crc kubenswrapper[4829]: I0122 00:10:40.571596 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f0849650-9651-4197-a80e-5b33bf0abef0-kubelet-dir\") pod \"f0849650-9651-4197-a80e-5b33bf0abef0\" (UID: \"f0849650-9651-4197-a80e-5b33bf0abef0\") " Jan 22 00:10:40 crc kubenswrapper[4829]: I0122 00:10:40.571716 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f0849650-9651-4197-a80e-5b33bf0abef0-kube-api-access\") pod \"f0849650-9651-4197-a80e-5b33bf0abef0\" (UID: \"f0849650-9651-4197-a80e-5b33bf0abef0\") " Jan 22 00:10:40 crc kubenswrapper[4829]: I0122 00:10:40.572028 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f0849650-9651-4197-a80e-5b33bf0abef0-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "f0849650-9651-4197-a80e-5b33bf0abef0" (UID: "f0849650-9651-4197-a80e-5b33bf0abef0"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:10:40 crc kubenswrapper[4829]: I0122 00:10:40.572157 4829 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f0849650-9651-4197-a80e-5b33bf0abef0-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:40 crc kubenswrapper[4829]: I0122 00:10:40.596476 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0849650-9651-4197-a80e-5b33bf0abef0-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "f0849650-9651-4197-a80e-5b33bf0abef0" (UID: "f0849650-9651-4197-a80e-5b33bf0abef0"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:10:40 crc kubenswrapper[4829]: I0122 00:10:40.673260 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f0849650-9651-4197-a80e-5b33bf0abef0-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:41 crc kubenswrapper[4829]: I0122 00:10:41.088241 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"f0849650-9651-4197-a80e-5b33bf0abef0","Type":"ContainerDied","Data":"96f588c86c569ca0258f097097e6c0a30d541d59403187f3c74c8606c2e9ccd9"} Jan 22 00:10:41 crc kubenswrapper[4829]: I0122 00:10:41.088283 4829 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="96f588c86c569ca0258f097097e6c0a30d541d59403187f3c74c8606c2e9ccd9" Jan 22 00:10:41 crc kubenswrapper[4829]: I0122 00:10:41.088414 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 00:10:43 crc kubenswrapper[4829]: I0122 00:10:43.259935 4829 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-c47sd container/openshift-config-operator namespace/openshift-config-operator: Liveness probe status=failure output="Get \"https://10.217.0.13:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 22 00:10:43 crc kubenswrapper[4829]: I0122 00:10:43.260673 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-c47sd" podUID="7f619c30-40fb-46a4-956e-366f2192703e" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.13:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 22 00:10:46 crc kubenswrapper[4829]: I0122 00:10:46.158186 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-8tq29" Jan 22 00:10:54 crc kubenswrapper[4829]: I0122 00:10:54.275508 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z7h8p" event={"ID":"12c22bf1-09d4-4a41-b977-87595cc90757","Type":"ContainerStarted","Data":"96c8e36e33e9718513f4d9226614a1f3857b3abdfd4c4b1b0a47cd8647a803b4"} Jan 22 00:10:54 crc kubenswrapper[4829]: I0122 00:10:54.284755 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bnqwm" event={"ID":"76530c0d-9597-4099-b9c3-f375bd12b26c","Type":"ContainerStarted","Data":"2f14618f7a12c46c26c468c1e311019e55b930a614715c7a2f150b3cc2102196"} Jan 22 00:10:54 crc kubenswrapper[4829]: I0122 00:10:54.287788 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tw5xj" event={"ID":"622d8532-4cca-4f15-972e-373735e2a5c1","Type":"ContainerStarted","Data":"5aac395d0f5563af6ede521071dd5fca4f388def893bcc301c03a1e93638cdc6"} Jan 22 00:10:54 crc kubenswrapper[4829]: I0122 00:10:54.292436 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-njstk" event={"ID":"df877a86-a205-463a-a771-1dee5d9750f8","Type":"ContainerStarted","Data":"5cd088c639af2800665a5a55bf22d604d428dcacea09adec7487682906340801"} Jan 22 00:10:54 crc kubenswrapper[4829]: I0122 00:10:54.294273 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tnwgc" event={"ID":"61af08cf-4127-439f-abd2-35fb5ad7dd2f","Type":"ContainerStarted","Data":"cbaf96230582d871aca28ca0543f47fe16ef8e8194116100845e3dc0dcb81c5a"} Jan 22 00:10:54 crc kubenswrapper[4829]: I0122 00:10:54.296364 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hp7g2" event={"ID":"4802cff4-7d5d-4af4-9ae0-7816748f46b3","Type":"ContainerStarted","Data":"1e8b407a1c7e3a5260d4beabaec5afa621a87ce0e49250b7dbc699abbcfca79f"} Jan 22 00:10:54 crc kubenswrapper[4829]: I0122 00:10:54.311607 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6pznq" event={"ID":"08ce8375-3a53-43e6-a7c1-3ce32698965d","Type":"ContainerStarted","Data":"5ea9aedf502e1d75ffa38059f395d4fd49a1e52df50beb7f72a2b42ad8bed88d"} Jan 22 00:10:54 crc kubenswrapper[4829]: I0122 00:10:54.321651 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-j66zg" event={"ID":"984817e5-ea2b-4a58-a082-0e83447b116a","Type":"ContainerStarted","Data":"336f9e574328b24ea5576b7a10d69b0142539681ba05f6842cc70855f2d7613a"} Jan 22 00:10:55 crc kubenswrapper[4829]: I0122 00:10:55.328448 4829 generic.go:334] "Generic (PLEG): container finished" podID="622d8532-4cca-4f15-972e-373735e2a5c1" containerID="5aac395d0f5563af6ede521071dd5fca4f388def893bcc301c03a1e93638cdc6" exitCode=0 Jan 22 00:10:55 crc kubenswrapper[4829]: I0122 00:10:55.328521 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tw5xj" event={"ID":"622d8532-4cca-4f15-972e-373735e2a5c1","Type":"ContainerDied","Data":"5aac395d0f5563af6ede521071dd5fca4f388def893bcc301c03a1e93638cdc6"} Jan 22 00:10:55 crc kubenswrapper[4829]: I0122 00:10:55.331721 4829 generic.go:334] "Generic (PLEG): container finished" podID="61af08cf-4127-439f-abd2-35fb5ad7dd2f" containerID="cbaf96230582d871aca28ca0543f47fe16ef8e8194116100845e3dc0dcb81c5a" exitCode=0 Jan 22 00:10:55 crc kubenswrapper[4829]: I0122 00:10:55.331794 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tnwgc" event={"ID":"61af08cf-4127-439f-abd2-35fb5ad7dd2f","Type":"ContainerDied","Data":"cbaf96230582d871aca28ca0543f47fe16ef8e8194116100845e3dc0dcb81c5a"} Jan 22 00:10:55 crc kubenswrapper[4829]: I0122 00:10:55.335415 4829 generic.go:334] "Generic (PLEG): container finished" podID="4802cff4-7d5d-4af4-9ae0-7816748f46b3" containerID="1e8b407a1c7e3a5260d4beabaec5afa621a87ce0e49250b7dbc699abbcfca79f" exitCode=0 Jan 22 00:10:55 crc kubenswrapper[4829]: I0122 00:10:55.335473 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hp7g2" event={"ID":"4802cff4-7d5d-4af4-9ae0-7816748f46b3","Type":"ContainerDied","Data":"1e8b407a1c7e3a5260d4beabaec5afa621a87ce0e49250b7dbc699abbcfca79f"} Jan 22 00:10:55 crc kubenswrapper[4829]: I0122 00:10:55.338666 4829 generic.go:334] "Generic (PLEG): container finished" podID="984817e5-ea2b-4a58-a082-0e83447b116a" containerID="336f9e574328b24ea5576b7a10d69b0142539681ba05f6842cc70855f2d7613a" exitCode=0 Jan 22 00:10:55 crc kubenswrapper[4829]: I0122 00:10:55.338720 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j66zg" event={"ID":"984817e5-ea2b-4a58-a082-0e83447b116a","Type":"ContainerDied","Data":"336f9e574328b24ea5576b7a10d69b0142539681ba05f6842cc70855f2d7613a"} Jan 22 00:10:55 crc kubenswrapper[4829]: I0122 00:10:55.342386 4829 generic.go:334] "Generic (PLEG): container finished" podID="12c22bf1-09d4-4a41-b977-87595cc90757" containerID="96c8e36e33e9718513f4d9226614a1f3857b3abdfd4c4b1b0a47cd8647a803b4" exitCode=0 Jan 22 00:10:55 crc kubenswrapper[4829]: I0122 00:10:55.342469 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z7h8p" event={"ID":"12c22bf1-09d4-4a41-b977-87595cc90757","Type":"ContainerDied","Data":"96c8e36e33e9718513f4d9226614a1f3857b3abdfd4c4b1b0a47cd8647a803b4"} Jan 22 00:10:55 crc kubenswrapper[4829]: I0122 00:10:55.349241 4829 generic.go:334] "Generic (PLEG): container finished" podID="76530c0d-9597-4099-b9c3-f375bd12b26c" containerID="2f14618f7a12c46c26c468c1e311019e55b930a614715c7a2f150b3cc2102196" exitCode=0 Jan 22 00:10:55 crc kubenswrapper[4829]: I0122 00:10:55.349296 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bnqwm" 
event={"ID":"76530c0d-9597-4099-b9c3-f375bd12b26c","Type":"ContainerDied","Data":"2f14618f7a12c46c26c468c1e311019e55b930a614715c7a2f150b3cc2102196"} Jan 22 00:10:56 crc kubenswrapper[4829]: I0122 00:10:56.356088 4829 generic.go:334] "Generic (PLEG): container finished" podID="df877a86-a205-463a-a771-1dee5d9750f8" containerID="5cd088c639af2800665a5a55bf22d604d428dcacea09adec7487682906340801" exitCode=0 Jan 22 00:10:56 crc kubenswrapper[4829]: I0122 00:10:56.356187 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-njstk" event={"ID":"df877a86-a205-463a-a771-1dee5d9750f8","Type":"ContainerDied","Data":"5cd088c639af2800665a5a55bf22d604d428dcacea09adec7487682906340801"} Jan 22 00:10:56 crc kubenswrapper[4829]: I0122 00:10:56.357769 4829 generic.go:334] "Generic (PLEG): container finished" podID="08ce8375-3a53-43e6-a7c1-3ce32698965d" containerID="5ea9aedf502e1d75ffa38059f395d4fd49a1e52df50beb7f72a2b42ad8bed88d" exitCode=0 Jan 22 00:10:56 crc kubenswrapper[4829]: I0122 00:10:56.357798 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6pznq" event={"ID":"08ce8375-3a53-43e6-a7c1-3ce32698965d","Type":"ContainerDied","Data":"5ea9aedf502e1d75ffa38059f395d4fd49a1e52df50beb7f72a2b42ad8bed88d"} Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.306135 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" podUID="de89451c-cbc7-401d-ab19-f4ea8916fcb5" containerName="oauth-openshift" containerID="cri-o://c1d68b3f40128e6b7f03d34e5a0acee24319e669352f36ccf10537736987d6a0" gracePeriod=15 Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.372252 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6pznq" event={"ID":"08ce8375-3a53-43e6-a7c1-3ce32698965d","Type":"ContainerStarted","Data":"17cef235973365c3cc1b0035da5eb414344c8f89bce8f0811b27638a88c9bba0"} Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.374523 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j66zg" event={"ID":"984817e5-ea2b-4a58-a082-0e83447b116a","Type":"ContainerStarted","Data":"c2bc941445c7249430b49337ffb88767fb848234176b29854cce0eefd7d42670"} Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.376825 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z7h8p" event={"ID":"12c22bf1-09d4-4a41-b977-87595cc90757","Type":"ContainerStarted","Data":"353b6660e659bf46a8cd0fc9f4270f979d46852432b7f3e12135e1fdec6aa73f"} Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.379041 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bnqwm" event={"ID":"76530c0d-9597-4099-b9c3-f375bd12b26c","Type":"ContainerStarted","Data":"9336e3dd3ba874f45733d4b6432036581d038d10ce4342db354d0d771e0dc5d8"} Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.381303 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tw5xj" event={"ID":"622d8532-4cca-4f15-972e-373735e2a5c1","Type":"ContainerStarted","Data":"ac7aeb14f4b7015d809edb9e5bfa876a6ab29ca5609b2c4d24c2ace9878b54cf"} Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.383357 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-njstk" 
event={"ID":"df877a86-a205-463a-a771-1dee5d9750f8","Type":"ContainerStarted","Data":"727d5a1857f2be99f217ad4f7caa2bc9a814bf1bd82e1a224fe875abdc3ed8e4"} Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.386013 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tnwgc" event={"ID":"61af08cf-4127-439f-abd2-35fb5ad7dd2f","Type":"ContainerStarted","Data":"49b49a50d77971c02b4578b97c68c9f6151a9793e84943a1878fafe7e6f8dbf4"} Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.389099 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hp7g2" event={"ID":"4802cff4-7d5d-4af4-9ae0-7816748f46b3","Type":"ContainerStarted","Data":"a53a2a77947444deb73d3c0b07e3220b8da23f3051b4b81ae631065af994b89c"} Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.398259 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6pznq" podStartSLOduration=4.229456743 podStartE2EDuration="1m12.398243525s" podCreationTimestamp="2026-01-22 00:09:46 +0000 UTC" firstStartedPulling="2026-01-22 00:09:49.38145753 +0000 UTC m=+167.417699442" lastFinishedPulling="2026-01-22 00:10:57.550244292 +0000 UTC m=+235.586486224" observedRunningTime="2026-01-22 00:10:58.396383338 +0000 UTC m=+236.432625250" watchObservedRunningTime="2026-01-22 00:10:58.398243525 +0000 UTC m=+236.434485437" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.419924 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-bnqwm" podStartSLOduration=7.130454108 podStartE2EDuration="1m12.419903231s" podCreationTimestamp="2026-01-22 00:09:46 +0000 UTC" firstStartedPulling="2026-01-22 00:09:50.45362187 +0000 UTC m=+168.489863782" lastFinishedPulling="2026-01-22 00:10:55.743070963 +0000 UTC m=+233.779312905" observedRunningTime="2026-01-22 00:10:58.416377334 +0000 UTC m=+236.452619246" watchObservedRunningTime="2026-01-22 00:10:58.419903231 +0000 UTC m=+236.456145143" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.468527 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-j66zg" podStartSLOduration=5.30214783 podStartE2EDuration="1m14.468512623s" podCreationTimestamp="2026-01-22 00:09:44 +0000 UTC" firstStartedPulling="2026-01-22 00:09:48.225630377 +0000 UTC m=+166.261872289" lastFinishedPulling="2026-01-22 00:10:57.39199513 +0000 UTC m=+235.428237082" observedRunningTime="2026-01-22 00:10:58.466277653 +0000 UTC m=+236.502519565" watchObservedRunningTime="2026-01-22 00:10:58.468512623 +0000 UTC m=+236.504754535" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.468959 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tnwgc" podStartSLOduration=5.446914368 podStartE2EDuration="1m12.468954469s" podCreationTimestamp="2026-01-22 00:09:46 +0000 UTC" firstStartedPulling="2026-01-22 00:09:50.437675907 +0000 UTC m=+168.473917819" lastFinishedPulling="2026-01-22 00:10:57.459716008 +0000 UTC m=+235.495957920" observedRunningTime="2026-01-22 00:10:58.443697264 +0000 UTC m=+236.479939176" watchObservedRunningTime="2026-01-22 00:10:58.468954469 +0000 UTC m=+236.505196371" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.486635 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-njstk" podStartSLOduration=5.277671093 
podStartE2EDuration="1m12.486612112s" podCreationTimestamp="2026-01-22 00:09:46 +0000 UTC" firstStartedPulling="2026-01-22 00:09:50.421796838 +0000 UTC m=+168.458038750" lastFinishedPulling="2026-01-22 00:10:57.630737857 +0000 UTC m=+235.666979769" observedRunningTime="2026-01-22 00:10:58.484161644 +0000 UTC m=+236.520403556" watchObservedRunningTime="2026-01-22 00:10:58.486612112 +0000 UTC m=+236.522854044" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.510322 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-z7h8p" podStartSLOduration=6.798510949 podStartE2EDuration="1m15.510295931s" podCreationTimestamp="2026-01-22 00:09:43 +0000 UTC" firstStartedPulling="2026-01-22 00:09:48.186823266 +0000 UTC m=+166.223065188" lastFinishedPulling="2026-01-22 00:10:56.898608258 +0000 UTC m=+234.934850170" observedRunningTime="2026-01-22 00:10:58.507084756 +0000 UTC m=+236.543326678" watchObservedRunningTime="2026-01-22 00:10:58.510295931 +0000 UTC m=+236.546537843" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.551092 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-tw5xj" podStartSLOduration=5.034204281 podStartE2EDuration="1m15.551066142s" podCreationTimestamp="2026-01-22 00:09:43 +0000 UTC" firstStartedPulling="2026-01-22 00:09:47.026310757 +0000 UTC m=+165.062552669" lastFinishedPulling="2026-01-22 00:10:57.543172618 +0000 UTC m=+235.579414530" observedRunningTime="2026-01-22 00:10:58.550274164 +0000 UTC m=+236.586516096" watchObservedRunningTime="2026-01-22 00:10:58.551066142 +0000 UTC m=+236.587308054" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.580524 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hp7g2" podStartSLOduration=7.9853828589999996 podStartE2EDuration="1m15.580506627s" podCreationTimestamp="2026-01-22 00:09:43 +0000 UTC" firstStartedPulling="2026-01-22 00:09:48.170185952 +0000 UTC m=+166.206427864" lastFinishedPulling="2026-01-22 00:10:55.76530972 +0000 UTC m=+233.801551632" observedRunningTime="2026-01-22 00:10:58.577330413 +0000 UTC m=+236.613572315" watchObservedRunningTime="2026-01-22 00:10:58.580506627 +0000 UTC m=+236.616748529" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.834523 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.870235 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-64b545798d-ql89c"] Jan 22 00:10:58 crc kubenswrapper[4829]: E0122 00:10:58.870761 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de89451c-cbc7-401d-ab19-f4ea8916fcb5" containerName="oauth-openshift" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.870904 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="de89451c-cbc7-401d-ab19-f4ea8916fcb5" containerName="oauth-openshift" Jan 22 00:10:58 crc kubenswrapper[4829]: E0122 00:10:58.871019 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0849650-9651-4197-a80e-5b33bf0abef0" containerName="pruner" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.871095 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0849650-9651-4197-a80e-5b33bf0abef0" containerName="pruner" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.871292 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0849650-9651-4197-a80e-5b33bf0abef0" containerName="pruner" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.871399 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="de89451c-cbc7-401d-ab19-f4ea8916fcb5" containerName="oauth-openshift" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.871984 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.894884 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-64b545798d-ql89c"] Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.924268 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-user-template-provider-selection\") pod \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.924337 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-trusted-ca-bundle\") pod \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.924377 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-router-certs\") pod \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.924414 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-user-idp-0-file-data\") pod \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.924448 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-hlq6g\" (UniqueName: \"kubernetes.io/projected/de89451c-cbc7-401d-ab19-f4ea8916fcb5-kube-api-access-hlq6g\") pod \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.924494 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-service-ca\") pod \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.924534 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-session\") pod \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.924649 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-cliconfig\") pod \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.924702 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/de89451c-cbc7-401d-ab19-f4ea8916fcb5-audit-dir\") pod \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.924736 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-serving-cert\") pod \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.924770 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/de89451c-cbc7-401d-ab19-f4ea8916fcb5-audit-policies\") pod \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.924796 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-user-template-error\") pod \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.924835 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-ocp-branding-template\") pod \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.924883 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-user-template-login\") pod 
\"de89451c-cbc7-401d-ab19-f4ea8916fcb5\" (UID: \"de89451c-cbc7-401d-ab19-f4ea8916fcb5\") " Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.925098 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-user-template-login\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.925133 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-user-template-error\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.925201 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-system-session\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.925230 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/43cbf059-9357-458b-8ebf-836241803527-audit-policies\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.925260 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-system-service-ca\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.925309 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-system-cliconfig\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.925336 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-system-router-certs\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.925360 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.925405 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.925436 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.925467 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.925521 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/43cbf059-9357-458b-8ebf-836241803527-audit-dir\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.925565 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-system-serving-cert\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.925591 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p2zpm\" (UniqueName: \"kubernetes.io/projected/43cbf059-9357-458b-8ebf-836241803527-kube-api-access-p2zpm\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.927098 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "de89451c-cbc7-401d-ab19-f4ea8916fcb5" (UID: "de89451c-cbc7-401d-ab19-f4ea8916fcb5"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.927873 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "de89451c-cbc7-401d-ab19-f4ea8916fcb5" (UID: "de89451c-cbc7-401d-ab19-f4ea8916fcb5"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.928712 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/de89451c-cbc7-401d-ab19-f4ea8916fcb5-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "de89451c-cbc7-401d-ab19-f4ea8916fcb5" (UID: "de89451c-cbc7-401d-ab19-f4ea8916fcb5"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.928776 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "de89451c-cbc7-401d-ab19-f4ea8916fcb5" (UID: "de89451c-cbc7-401d-ab19-f4ea8916fcb5"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.931156 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de89451c-cbc7-401d-ab19-f4ea8916fcb5-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "de89451c-cbc7-401d-ab19-f4ea8916fcb5" (UID: "de89451c-cbc7-401d-ab19-f4ea8916fcb5"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.950531 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "de89451c-cbc7-401d-ab19-f4ea8916fcb5" (UID: "de89451c-cbc7-401d-ab19-f4ea8916fcb5"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.951604 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "de89451c-cbc7-401d-ab19-f4ea8916fcb5" (UID: "de89451c-cbc7-401d-ab19-f4ea8916fcb5"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.952505 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de89451c-cbc7-401d-ab19-f4ea8916fcb5-kube-api-access-hlq6g" (OuterVolumeSpecName: "kube-api-access-hlq6g") pod "de89451c-cbc7-401d-ab19-f4ea8916fcb5" (UID: "de89451c-cbc7-401d-ab19-f4ea8916fcb5"). InnerVolumeSpecName "kube-api-access-hlq6g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.953579 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "de89451c-cbc7-401d-ab19-f4ea8916fcb5" (UID: "de89451c-cbc7-401d-ab19-f4ea8916fcb5"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.958062 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "de89451c-cbc7-401d-ab19-f4ea8916fcb5" (UID: "de89451c-cbc7-401d-ab19-f4ea8916fcb5"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.959648 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "de89451c-cbc7-401d-ab19-f4ea8916fcb5" (UID: "de89451c-cbc7-401d-ab19-f4ea8916fcb5"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.960007 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "de89451c-cbc7-401d-ab19-f4ea8916fcb5" (UID: "de89451c-cbc7-401d-ab19-f4ea8916fcb5"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.960898 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "de89451c-cbc7-401d-ab19-f4ea8916fcb5" (UID: "de89451c-cbc7-401d-ab19-f4ea8916fcb5"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:10:58 crc kubenswrapper[4829]: I0122 00:10:58.965618 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "de89451c-cbc7-401d-ab19-f4ea8916fcb5" (UID: "de89451c-cbc7-401d-ab19-f4ea8916fcb5"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027319 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-system-cliconfig\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027378 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-system-router-certs\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027397 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027453 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027480 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027503 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027535 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/43cbf059-9357-458b-8ebf-836241803527-audit-dir\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027577 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-system-serving-cert\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " 
pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027597 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p2zpm\" (UniqueName: \"kubernetes.io/projected/43cbf059-9357-458b-8ebf-836241803527-kube-api-access-p2zpm\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027626 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-user-template-login\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027653 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-user-template-error\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027691 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-system-session\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027709 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/43cbf059-9357-458b-8ebf-836241803527-audit-policies\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027728 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-system-service-ca\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027775 4829 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027788 4829 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027798 4829 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027807 4829 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027816 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hlq6g\" (UniqueName: \"kubernetes.io/projected/de89451c-cbc7-401d-ab19-f4ea8916fcb5-kube-api-access-hlq6g\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027826 4829 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027836 4829 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027846 4829 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027855 4829 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/de89451c-cbc7-401d-ab19-f4ea8916fcb5-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027863 4829 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027871 4829 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/de89451c-cbc7-401d-ab19-f4ea8916fcb5-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027880 4829 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027889 4829 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.027900 4829 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/de89451c-cbc7-401d-ab19-f4ea8916fcb5-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.028445 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-system-cliconfig\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.028534 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/43cbf059-9357-458b-8ebf-836241803527-audit-dir\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.028581 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-system-service-ca\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.030149 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.030430 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/43cbf059-9357-458b-8ebf-836241803527-audit-policies\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.031607 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-user-template-error\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.032426 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.032822 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.036762 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-system-serving-cert\") pod 
\"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.037013 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-system-session\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.042034 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-system-router-certs\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.042123 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.042387 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/43cbf059-9357-458b-8ebf-836241803527-v4-0-config-user-template-login\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.053266 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p2zpm\" (UniqueName: \"kubernetes.io/projected/43cbf059-9357-458b-8ebf-836241803527-kube-api-access-p2zpm\") pod \"oauth-openshift-64b545798d-ql89c\" (UID: \"43cbf059-9357-458b-8ebf-836241803527\") " pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.199519 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.418861 4829 generic.go:334] "Generic (PLEG): container finished" podID="de89451c-cbc7-401d-ab19-f4ea8916fcb5" containerID="c1d68b3f40128e6b7f03d34e5a0acee24319e669352f36ccf10537736987d6a0" exitCode=0 Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.419086 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" event={"ID":"de89451c-cbc7-401d-ab19-f4ea8916fcb5","Type":"ContainerDied","Data":"c1d68b3f40128e6b7f03d34e5a0acee24319e669352f36ccf10537736987d6a0"} Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.419114 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" event={"ID":"de89451c-cbc7-401d-ab19-f4ea8916fcb5","Type":"ContainerDied","Data":"db2cae3d180e6e929f48e6521b1938269f6b06a47fdc638f91195c2251bbb82c"} Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.419130 4829 scope.go:117] "RemoveContainer" containerID="c1d68b3f40128e6b7f03d34e5a0acee24319e669352f36ccf10537736987d6a0" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.419303 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-26xkj" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.471266 4829 scope.go:117] "RemoveContainer" containerID="c1d68b3f40128e6b7f03d34e5a0acee24319e669352f36ccf10537736987d6a0" Jan 22 00:10:59 crc kubenswrapper[4829]: E0122 00:10:59.471880 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1d68b3f40128e6b7f03d34e5a0acee24319e669352f36ccf10537736987d6a0\": container with ID starting with c1d68b3f40128e6b7f03d34e5a0acee24319e669352f36ccf10537736987d6a0 not found: ID does not exist" containerID="c1d68b3f40128e6b7f03d34e5a0acee24319e669352f36ccf10537736987d6a0" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.471908 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1d68b3f40128e6b7f03d34e5a0acee24319e669352f36ccf10537736987d6a0"} err="failed to get container status \"c1d68b3f40128e6b7f03d34e5a0acee24319e669352f36ccf10537736987d6a0\": rpc error: code = NotFound desc = could not find container \"c1d68b3f40128e6b7f03d34e5a0acee24319e669352f36ccf10537736987d6a0\": container with ID starting with c1d68b3f40128e6b7f03d34e5a0acee24319e669352f36ccf10537736987d6a0 not found: ID does not exist" Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.507607 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-26xkj"] Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.509341 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-26xkj"] Jan 22 00:10:59 crc kubenswrapper[4829]: I0122 00:10:59.725171 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-64b545798d-ql89c"] Jan 22 00:11:00 crc kubenswrapper[4829]: I0122 00:11:00.426326 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" event={"ID":"43cbf059-9357-458b-8ebf-836241803527","Type":"ContainerStarted","Data":"2e8811ea0cd9f5a115fa7774e36530a938f0722aab7050f2b1cd7c64c969e0c1"} Jan 22 00:11:00 crc kubenswrapper[4829]: 
I0122 00:11:00.565955 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de89451c-cbc7-401d-ab19-f4ea8916fcb5" path="/var/lib/kubelet/pods/de89451c-cbc7-401d-ab19-f4ea8916fcb5/volumes" Jan 22 00:11:01 crc kubenswrapper[4829]: I0122 00:11:01.613714 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw"] Jan 22 00:11:01 crc kubenswrapper[4829]: I0122 00:11:01.614080 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw" podUID="b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420" containerName="controller-manager" containerID="cri-o://a9e0ad860caec89237c5ba047ee498461588643477da7705f6991553c0525017" gracePeriod=30 Jan 22 00:11:01 crc kubenswrapper[4829]: I0122 00:11:01.707955 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb"] Jan 22 00:11:01 crc kubenswrapper[4829]: I0122 00:11:01.708429 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb" podUID="475456c1-0b50-49ca-afcd-0ea6560943fa" containerName="route-controller-manager" containerID="cri-o://e70fa4ff0a472e710337d4c5d0b5a01e9cdeb212c79b7aff71c87b577348a88d" gracePeriod=30 Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.450309 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" event={"ID":"43cbf059-9357-458b-8ebf-836241803527","Type":"ContainerStarted","Data":"51bf8fe2b3745d79769ad73ee96b157e864935bc82857b1c709f6c5959e6bfe5"} Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.450946 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.456149 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.456676 4829 generic.go:334] "Generic (PLEG): container finished" podID="475456c1-0b50-49ca-afcd-0ea6560943fa" containerID="e70fa4ff0a472e710337d4c5d0b5a01e9cdeb212c79b7aff71c87b577348a88d" exitCode=0 Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.456731 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb" event={"ID":"475456c1-0b50-49ca-afcd-0ea6560943fa","Type":"ContainerDied","Data":"e70fa4ff0a472e710337d4c5d0b5a01e9cdeb212c79b7aff71c87b577348a88d"} Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.458129 4829 generic.go:334] "Generic (PLEG): container finished" podID="b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420" containerID="a9e0ad860caec89237c5ba047ee498461588643477da7705f6991553c0525017" exitCode=0 Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.458154 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw" event={"ID":"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420","Type":"ContainerDied","Data":"a9e0ad860caec89237c5ba047ee498461588643477da7705f6991553c0525017"} Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.475067 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-64b545798d-ql89c" 
podStartSLOduration=30.475052098 podStartE2EDuration="30.475052098s" podCreationTimestamp="2026-01-22 00:10:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:11:03.473828304 +0000 UTC m=+241.510070216" watchObservedRunningTime="2026-01-22 00:11:03.475052098 +0000 UTC m=+241.511294010" Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.824670 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb" Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.829238 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-tw5xj" Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.829273 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-tw5xj" Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.891558 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt"] Jan 22 00:11:03 crc kubenswrapper[4829]: E0122 00:11:03.891844 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="475456c1-0b50-49ca-afcd-0ea6560943fa" containerName="route-controller-manager" Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.891856 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="475456c1-0b50-49ca-afcd-0ea6560943fa" containerName="route-controller-manager" Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.891973 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="475456c1-0b50-49ca-afcd-0ea6560943fa" containerName="route-controller-manager" Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.893083 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt" Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.893116 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt"] Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.959907 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw" Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.993910 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/475456c1-0b50-49ca-afcd-0ea6560943fa-serving-cert\") pod \"475456c1-0b50-49ca-afcd-0ea6560943fa\" (UID: \"475456c1-0b50-49ca-afcd-0ea6560943fa\") " Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.993968 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/475456c1-0b50-49ca-afcd-0ea6560943fa-config\") pod \"475456c1-0b50-49ca-afcd-0ea6560943fa\" (UID: \"475456c1-0b50-49ca-afcd-0ea6560943fa\") " Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.994072 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/475456c1-0b50-49ca-afcd-0ea6560943fa-client-ca\") pod \"475456c1-0b50-49ca-afcd-0ea6560943fa\" (UID: \"475456c1-0b50-49ca-afcd-0ea6560943fa\") " Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.994116 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mp2x6\" (UniqueName: \"kubernetes.io/projected/475456c1-0b50-49ca-afcd-0ea6560943fa-kube-api-access-mp2x6\") pod \"475456c1-0b50-49ca-afcd-0ea6560943fa\" (UID: \"475456c1-0b50-49ca-afcd-0ea6560943fa\") " Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.994234 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17f14648-938f-4bad-aab2-7d733bd29059-config\") pod \"route-controller-manager-d57fd7544-r2hnt\" (UID: \"17f14648-938f-4bad-aab2-7d733bd29059\") " pod="openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt" Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.994264 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17f14648-938f-4bad-aab2-7d733bd29059-serving-cert\") pod \"route-controller-manager-d57fd7544-r2hnt\" (UID: \"17f14648-938f-4bad-aab2-7d733bd29059\") " pod="openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt" Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.994352 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcw9h\" (UniqueName: \"kubernetes.io/projected/17f14648-938f-4bad-aab2-7d733bd29059-kube-api-access-fcw9h\") pod \"route-controller-manager-d57fd7544-r2hnt\" (UID: \"17f14648-938f-4bad-aab2-7d733bd29059\") " pod="openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt" Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.994379 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/17f14648-938f-4bad-aab2-7d733bd29059-client-ca\") pod \"route-controller-manager-d57fd7544-r2hnt\" (UID: \"17f14648-938f-4bad-aab2-7d733bd29059\") " pod="openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt" Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.995321 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/475456c1-0b50-49ca-afcd-0ea6560943fa-config" (OuterVolumeSpecName: "config") pod 
"475456c1-0b50-49ca-afcd-0ea6560943fa" (UID: "475456c1-0b50-49ca-afcd-0ea6560943fa"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.995578 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/475456c1-0b50-49ca-afcd-0ea6560943fa-client-ca" (OuterVolumeSpecName: "client-ca") pod "475456c1-0b50-49ca-afcd-0ea6560943fa" (UID: "475456c1-0b50-49ca-afcd-0ea6560943fa"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.999414 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/475456c1-0b50-49ca-afcd-0ea6560943fa-kube-api-access-mp2x6" (OuterVolumeSpecName: "kube-api-access-mp2x6") pod "475456c1-0b50-49ca-afcd-0ea6560943fa" (UID: "475456c1-0b50-49ca-afcd-0ea6560943fa"). InnerVolumeSpecName "kube-api-access-mp2x6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:11:03 crc kubenswrapper[4829]: I0122 00:11:03.999494 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/475456c1-0b50-49ca-afcd-0ea6560943fa-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "475456c1-0b50-49ca-afcd-0ea6560943fa" (UID: "475456c1-0b50-49ca-afcd-0ea6560943fa"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.095227 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f428p\" (UniqueName: \"kubernetes.io/projected/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-kube-api-access-f428p\") pod \"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420\" (UID: \"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420\") " Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.095275 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-proxy-ca-bundles\") pod \"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420\" (UID: \"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420\") " Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.095384 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-config\") pod \"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420\" (UID: \"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420\") " Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.095439 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-serving-cert\") pod \"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420\" (UID: \"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420\") " Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.095458 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-client-ca\") pod \"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420\" (UID: \"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420\") " Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.095609 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17f14648-938f-4bad-aab2-7d733bd29059-serving-cert\") pod \"route-controller-manager-d57fd7544-r2hnt\" (UID: 
\"17f14648-938f-4bad-aab2-7d733bd29059\") " pod="openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.095704 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcw9h\" (UniqueName: \"kubernetes.io/projected/17f14648-938f-4bad-aab2-7d733bd29059-kube-api-access-fcw9h\") pod \"route-controller-manager-d57fd7544-r2hnt\" (UID: \"17f14648-938f-4bad-aab2-7d733bd29059\") " pod="openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.095747 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/17f14648-938f-4bad-aab2-7d733bd29059-client-ca\") pod \"route-controller-manager-d57fd7544-r2hnt\" (UID: \"17f14648-938f-4bad-aab2-7d733bd29059\") " pod="openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.095783 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17f14648-938f-4bad-aab2-7d733bd29059-config\") pod \"route-controller-manager-d57fd7544-r2hnt\" (UID: \"17f14648-938f-4bad-aab2-7d733bd29059\") " pod="openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.095833 4829 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/475456c1-0b50-49ca-afcd-0ea6560943fa-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.095844 4829 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/475456c1-0b50-49ca-afcd-0ea6560943fa-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.095856 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mp2x6\" (UniqueName: \"kubernetes.io/projected/475456c1-0b50-49ca-afcd-0ea6560943fa-kube-api-access-mp2x6\") on node \"crc\" DevicePath \"\"" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.095866 4829 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/475456c1-0b50-49ca-afcd-0ea6560943fa-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.096516 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-client-ca" (OuterVolumeSpecName: "client-ca") pod "b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420" (UID: "b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.096572 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420" (UID: "b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.096627 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-config" (OuterVolumeSpecName: "config") pod "b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420" (UID: "b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.097131 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17f14648-938f-4bad-aab2-7d733bd29059-config\") pod \"route-controller-manager-d57fd7544-r2hnt\" (UID: \"17f14648-938f-4bad-aab2-7d733bd29059\") " pod="openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.097239 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/17f14648-938f-4bad-aab2-7d733bd29059-client-ca\") pod \"route-controller-manager-d57fd7544-r2hnt\" (UID: \"17f14648-938f-4bad-aab2-7d733bd29059\") " pod="openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.099677 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-kube-api-access-f428p" (OuterVolumeSpecName: "kube-api-access-f428p") pod "b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420" (UID: "b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420"). InnerVolumeSpecName "kube-api-access-f428p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.100039 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17f14648-938f-4bad-aab2-7d733bd29059-serving-cert\") pod \"route-controller-manager-d57fd7544-r2hnt\" (UID: \"17f14648-938f-4bad-aab2-7d733bd29059\") " pod="openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.100530 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420" (UID: "b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.113751 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcw9h\" (UniqueName: \"kubernetes.io/projected/17f14648-938f-4bad-aab2-7d733bd29059-kube-api-access-fcw9h\") pod \"route-controller-manager-d57fd7544-r2hnt\" (UID: \"17f14648-938f-4bad-aab2-7d733bd29059\") " pod="openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.197427 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f428p\" (UniqueName: \"kubernetes.io/projected/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-kube-api-access-f428p\") on node \"crc\" DevicePath \"\"" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.197466 4829 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.197480 4829 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.197491 4829 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.197500 4829 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.199093 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hp7g2" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.199147 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hp7g2" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.206713 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-z7h8p" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.206746 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-z7h8p" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.207602 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.376598 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-j66zg" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.376661 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-j66zg" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.386522 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt"] Jan 22 00:11:04 crc kubenswrapper[4829]: W0122 00:11:04.398885 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod17f14648_938f_4bad_aab2_7d733bd29059.slice/crio-861b6a1300771a3adcb3ec277f7b6a601186d1a73dfd4bc052214150fd4771e2 WatchSource:0}: Error finding container 861b6a1300771a3adcb3ec277f7b6a601186d1a73dfd4bc052214150fd4771e2: Status 404 returned error can't find the container with id 861b6a1300771a3adcb3ec277f7b6a601186d1a73dfd4bc052214150fd4771e2 Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.464215 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt" event={"ID":"17f14648-938f-4bad-aab2-7d733bd29059","Type":"ContainerStarted","Data":"861b6a1300771a3adcb3ec277f7b6a601186d1a73dfd4bc052214150fd4771e2"} Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.466349 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw" event={"ID":"b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420","Type":"ContainerDied","Data":"a9841c8bb8d033e59b8c0810e3e1a3148c4b5c026c3878c1a4653f7b57e70c63"} Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.466409 4829 scope.go:117] "RemoveContainer" containerID="a9e0ad860caec89237c5ba047ee498461588643477da7705f6991553c0525017" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.466574 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.476828 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.476820 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb" event={"ID":"475456c1-0b50-49ca-afcd-0ea6560943fa","Type":"ContainerDied","Data":"462e175e703a1cf18e475408eef65a4660608c10607cc419890f4f1cad9dc422"} Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.506108 4829 scope.go:117] "RemoveContainer" containerID="e70fa4ff0a472e710337d4c5d0b5a01e9cdeb212c79b7aff71c87b577348a88d" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.519591 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw"] Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.524075 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-84cc8f9b99-7f9tw"] Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.530932 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb"] Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.534913 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64cbc6c54-nssbb"] Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.564069 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="475456c1-0b50-49ca-afcd-0ea6560943fa" path="/var/lib/kubelet/pods/475456c1-0b50-49ca-afcd-0ea6560943fa/volumes" Jan 22 00:11:04 crc kubenswrapper[4829]: I0122 00:11:04.568855 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420" path="/var/lib/kubelet/pods/b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420/volumes" Jan 22 00:11:05 crc kubenswrapper[4829]: I0122 00:11:05.502800 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-z7h8p" Jan 22 00:11:05 crc kubenswrapper[4829]: I0122 00:11:05.503443 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-j66zg" Jan 22 00:11:05 crc kubenswrapper[4829]: I0122 00:11:05.503752 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hp7g2" Jan 22 00:11:05 crc kubenswrapper[4829]: I0122 00:11:05.508779 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-tw5xj" Jan 22 00:11:05 crc kubenswrapper[4829]: I0122 00:11:05.555069 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-z7h8p" Jan 22 00:11:05 crc kubenswrapper[4829]: I0122 00:11:05.555162 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-tw5xj" Jan 22 00:11:05 crc kubenswrapper[4829]: I0122 00:11:05.555187 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-j66zg" Jan 22 00:11:05 crc kubenswrapper[4829]: I0122 00:11:05.559027 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hp7g2" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.017393 4829 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w"] Jan 22 00:11:06 crc kubenswrapper[4829]: E0122 00:11:06.017706 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420" containerName="controller-manager" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.017722 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420" containerName="controller-manager" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.017840 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="b68d2ea2-e2f5-4427-b6e6-a76dcf5c1420" containerName="controller-manager" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.018280 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.020297 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hq7p9\" (UniqueName: \"kubernetes.io/projected/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-kube-api-access-hq7p9\") pod \"controller-manager-7c9d5cf4bd-jvh4w\" (UID: \"9b59157d-3368-4c08-b2c2-22d6cf19e0fc\") " pod="openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.020344 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-proxy-ca-bundles\") pod \"controller-manager-7c9d5cf4bd-jvh4w\" (UID: \"9b59157d-3368-4c08-b2c2-22d6cf19e0fc\") " pod="openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.020410 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-serving-cert\") pod \"controller-manager-7c9d5cf4bd-jvh4w\" (UID: \"9b59157d-3368-4c08-b2c2-22d6cf19e0fc\") " pod="openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.020437 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-config\") pod \"controller-manager-7c9d5cf4bd-jvh4w\" (UID: \"9b59157d-3368-4c08-b2c2-22d6cf19e0fc\") " pod="openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.020514 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-client-ca\") pod \"controller-manager-7c9d5cf4bd-jvh4w\" (UID: \"9b59157d-3368-4c08-b2c2-22d6cf19e0fc\") " pod="openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.022442 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.022821 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.023053 4829 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.023073 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.023148 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.023535 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.037676 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.039334 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w"] Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.121293 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-client-ca\") pod \"controller-manager-7c9d5cf4bd-jvh4w\" (UID: \"9b59157d-3368-4c08-b2c2-22d6cf19e0fc\") " pod="openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.121377 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hq7p9\" (UniqueName: \"kubernetes.io/projected/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-kube-api-access-hq7p9\") pod \"controller-manager-7c9d5cf4bd-jvh4w\" (UID: \"9b59157d-3368-4c08-b2c2-22d6cf19e0fc\") " pod="openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.121400 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-proxy-ca-bundles\") pod \"controller-manager-7c9d5cf4bd-jvh4w\" (UID: \"9b59157d-3368-4c08-b2c2-22d6cf19e0fc\") " pod="openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.121432 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-serving-cert\") pod \"controller-manager-7c9d5cf4bd-jvh4w\" (UID: \"9b59157d-3368-4c08-b2c2-22d6cf19e0fc\") " pod="openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.121453 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-config\") pod \"controller-manager-7c9d5cf4bd-jvh4w\" (UID: \"9b59157d-3368-4c08-b2c2-22d6cf19e0fc\") " pod="openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.122280 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-client-ca\") pod \"controller-manager-7c9d5cf4bd-jvh4w\" (UID: \"9b59157d-3368-4c08-b2c2-22d6cf19e0fc\") " pod="openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w" Jan 22 00:11:06 crc 
kubenswrapper[4829]: I0122 00:11:06.122679 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-config\") pod \"controller-manager-7c9d5cf4bd-jvh4w\" (UID: \"9b59157d-3368-4c08-b2c2-22d6cf19e0fc\") " pod="openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.123427 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-proxy-ca-bundles\") pod \"controller-manager-7c9d5cf4bd-jvh4w\" (UID: \"9b59157d-3368-4c08-b2c2-22d6cf19e0fc\") " pod="openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.127522 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-serving-cert\") pod \"controller-manager-7c9d5cf4bd-jvh4w\" (UID: \"9b59157d-3368-4c08-b2c2-22d6cf19e0fc\") " pod="openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.141671 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hq7p9\" (UniqueName: \"kubernetes.io/projected/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-kube-api-access-hq7p9\") pod \"controller-manager-7c9d5cf4bd-jvh4w\" (UID: \"9b59157d-3368-4c08-b2c2-22d6cf19e0fc\") " pod="openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.335745 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.505843 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt" event={"ID":"17f14648-938f-4bad-aab2-7d733bd29059","Type":"ContainerStarted","Data":"f57d688dc66b4b357f3ed506a02af6af2d3213fe1f131b3c0cb451e8850bf4b6"} Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.506801 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.512685 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.525415 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt" podStartSLOduration=5.525397492 podStartE2EDuration="5.525397492s" podCreationTimestamp="2026-01-22 00:11:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:11:06.52424358 +0000 UTC m=+244.560485492" watchObservedRunningTime="2026-01-22 00:11:06.525397492 +0000 UTC m=+244.561639404" Jan 22 00:11:06 crc kubenswrapper[4829]: I0122 00:11:06.610698 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w"] Jan 22 00:11:06 crc kubenswrapper[4829]: W0122 00:11:06.615778 4829 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9b59157d_3368_4c08_b2c2_22d6cf19e0fc.slice/crio-4f1fb19a4ac9baaa8ef4f01a832092a092558737dc04fa7bb7fbbf68f917a1e5 WatchSource:0}: Error finding container 4f1fb19a4ac9baaa8ef4f01a832092a092558737dc04fa7bb7fbbf68f917a1e5: Status 404 returned error can't find the container with id 4f1fb19a4ac9baaa8ef4f01a832092a092558737dc04fa7bb7fbbf68f917a1e5 Jan 22 00:11:07 crc kubenswrapper[4829]: I0122 00:11:07.053099 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-bnqwm" Jan 22 00:11:07 crc kubenswrapper[4829]: I0122 00:11:07.053197 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-bnqwm" Jan 22 00:11:07 crc kubenswrapper[4829]: I0122 00:11:07.128225 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-bnqwm" Jan 22 00:11:07 crc kubenswrapper[4829]: I0122 00:11:07.514106 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w" event={"ID":"9b59157d-3368-4c08-b2c2-22d6cf19e0fc","Type":"ContainerStarted","Data":"4f1fb19a4ac9baaa8ef4f01a832092a092558737dc04fa7bb7fbbf68f917a1e5"} Jan 22 00:11:07 crc kubenswrapper[4829]: I0122 00:11:07.562464 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-bnqwm" Jan 22 00:11:07 crc kubenswrapper[4829]: I0122 00:11:07.785722 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-j66zg"] Jan 22 00:11:07 crc kubenswrapper[4829]: I0122 00:11:07.786036 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-j66zg" podUID="984817e5-ea2b-4a58-a082-0e83447b116a" containerName="registry-server" containerID="cri-o://c2bc941445c7249430b49337ffb88767fb848234176b29854cce0eefd7d42670" gracePeriod=2 Jan 22 00:11:07 crc kubenswrapper[4829]: I0122 00:11:07.850983 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tnwgc" Jan 22 00:11:07 crc kubenswrapper[4829]: I0122 00:11:07.851031 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tnwgc" Jan 22 00:11:07 crc kubenswrapper[4829]: I0122 00:11:07.875016 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6pznq" Jan 22 00:11:07 crc kubenswrapper[4829]: I0122 00:11:07.875075 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6pznq" Jan 22 00:11:07 crc kubenswrapper[4829]: I0122 00:11:07.896331 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tnwgc" Jan 22 00:11:07 crc kubenswrapper[4829]: I0122 00:11:07.903196 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-njstk" Jan 22 00:11:07 crc kubenswrapper[4829]: I0122 00:11:07.903586 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-njstk" Jan 22 00:11:07 crc kubenswrapper[4829]: I0122 00:11:07.911448 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6pznq" 
Jan 22 00:11:07 crc kubenswrapper[4829]: I0122 00:11:07.944696 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-njstk" Jan 22 00:11:07 crc kubenswrapper[4829]: I0122 00:11:07.983093 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-z7h8p"] Jan 22 00:11:07 crc kubenswrapper[4829]: I0122 00:11:07.983333 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-z7h8p" podUID="12c22bf1-09d4-4a41-b977-87595cc90757" containerName="registry-server" containerID="cri-o://353b6660e659bf46a8cd0fc9f4270f979d46852432b7f3e12135e1fdec6aa73f" gracePeriod=2 Jan 22 00:11:08 crc kubenswrapper[4829]: I0122 00:11:08.561391 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6pznq" Jan 22 00:11:08 crc kubenswrapper[4829]: I0122 00:11:08.569467 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-njstk" Jan 22 00:11:08 crc kubenswrapper[4829]: I0122 00:11:08.579044 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tnwgc" Jan 22 00:11:09 crc kubenswrapper[4829]: I0122 00:11:09.527209 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w" event={"ID":"9b59157d-3368-4c08-b2c2-22d6cf19e0fc","Type":"ContainerStarted","Data":"5977512327a76843f9c2cd203a229cb74018957ea37c0b6af07222ae2819865d"} Jan 22 00:11:09 crc kubenswrapper[4829]: I0122 00:11:09.527565 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w" Jan 22 00:11:09 crc kubenswrapper[4829]: I0122 00:11:09.531454 4829 generic.go:334] "Generic (PLEG): container finished" podID="984817e5-ea2b-4a58-a082-0e83447b116a" containerID="c2bc941445c7249430b49337ffb88767fb848234176b29854cce0eefd7d42670" exitCode=0 Jan 22 00:11:09 crc kubenswrapper[4829]: I0122 00:11:09.531514 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j66zg" event={"ID":"984817e5-ea2b-4a58-a082-0e83447b116a","Type":"ContainerDied","Data":"c2bc941445c7249430b49337ffb88767fb848234176b29854cce0eefd7d42670"} Jan 22 00:11:09 crc kubenswrapper[4829]: I0122 00:11:09.534336 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w" Jan 22 00:11:09 crc kubenswrapper[4829]: I0122 00:11:09.536141 4829 generic.go:334] "Generic (PLEG): container finished" podID="12c22bf1-09d4-4a41-b977-87595cc90757" containerID="353b6660e659bf46a8cd0fc9f4270f979d46852432b7f3e12135e1fdec6aa73f" exitCode=0 Jan 22 00:11:09 crc kubenswrapper[4829]: I0122 00:11:09.536258 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z7h8p" event={"ID":"12c22bf1-09d4-4a41-b977-87595cc90757","Type":"ContainerDied","Data":"353b6660e659bf46a8cd0fc9f4270f979d46852432b7f3e12135e1fdec6aa73f"} Jan 22 00:11:09 crc kubenswrapper[4829]: I0122 00:11:09.548556 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w" podStartSLOduration=8.54851725 podStartE2EDuration="8.54851725s" podCreationTimestamp="2026-01-22 00:11:01 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:11:09.544132293 +0000 UTC m=+247.580374215" watchObservedRunningTime="2026-01-22 00:11:09.54851725 +0000 UTC m=+247.584759162" Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.201521 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tnwgc"] Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.325181 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-j66zg" Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.450470 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z7h8p" Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.484413 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/984817e5-ea2b-4a58-a082-0e83447b116a-utilities\") pod \"984817e5-ea2b-4a58-a082-0e83447b116a\" (UID: \"984817e5-ea2b-4a58-a082-0e83447b116a\") " Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.484568 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hdh8w\" (UniqueName: \"kubernetes.io/projected/984817e5-ea2b-4a58-a082-0e83447b116a-kube-api-access-hdh8w\") pod \"984817e5-ea2b-4a58-a082-0e83447b116a\" (UID: \"984817e5-ea2b-4a58-a082-0e83447b116a\") " Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.484612 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/984817e5-ea2b-4a58-a082-0e83447b116a-catalog-content\") pod \"984817e5-ea2b-4a58-a082-0e83447b116a\" (UID: \"984817e5-ea2b-4a58-a082-0e83447b116a\") " Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.485272 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/984817e5-ea2b-4a58-a082-0e83447b116a-utilities" (OuterVolumeSpecName: "utilities") pod "984817e5-ea2b-4a58-a082-0e83447b116a" (UID: "984817e5-ea2b-4a58-a082-0e83447b116a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.489742 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/984817e5-ea2b-4a58-a082-0e83447b116a-kube-api-access-hdh8w" (OuterVolumeSpecName: "kube-api-access-hdh8w") pod "984817e5-ea2b-4a58-a082-0e83447b116a" (UID: "984817e5-ea2b-4a58-a082-0e83447b116a"). InnerVolumeSpecName "kube-api-access-hdh8w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.532375 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/984817e5-ea2b-4a58-a082-0e83447b116a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "984817e5-ea2b-4a58-a082-0e83447b116a" (UID: "984817e5-ea2b-4a58-a082-0e83447b116a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.543576 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j66zg" event={"ID":"984817e5-ea2b-4a58-a082-0e83447b116a","Type":"ContainerDied","Data":"c5674c128a2523c527ce7028cb1c0aec21dc993cc448e2b2ecf4f6ed32a33450"} Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.543634 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-j66zg" Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.543647 4829 scope.go:117] "RemoveContainer" containerID="c2bc941445c7249430b49337ffb88767fb848234176b29854cce0eefd7d42670" Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.546372 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z7h8p" event={"ID":"12c22bf1-09d4-4a41-b977-87595cc90757","Type":"ContainerDied","Data":"ca1f83bb37e0a6d945a0aa709c142425553495976663691ebcccefaa7a760256"} Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.546579 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tnwgc" podUID="61af08cf-4127-439f-abd2-35fb5ad7dd2f" containerName="registry-server" containerID="cri-o://49b49a50d77971c02b4578b97c68c9f6151a9793e84943a1878fafe7e6f8dbf4" gracePeriod=2 Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.546772 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z7h8p" Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.560820 4829 scope.go:117] "RemoveContainer" containerID="336f9e574328b24ea5576b7a10d69b0142539681ba05f6842cc70855f2d7613a" Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.576104 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-j66zg"] Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.580688 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-j66zg"] Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.585755 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jqvm\" (UniqueName: \"kubernetes.io/projected/12c22bf1-09d4-4a41-b977-87595cc90757-kube-api-access-8jqvm\") pod \"12c22bf1-09d4-4a41-b977-87595cc90757\" (UID: \"12c22bf1-09d4-4a41-b977-87595cc90757\") " Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.585890 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12c22bf1-09d4-4a41-b977-87595cc90757-catalog-content\") pod \"12c22bf1-09d4-4a41-b977-87595cc90757\" (UID: \"12c22bf1-09d4-4a41-b977-87595cc90757\") " Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.585967 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12c22bf1-09d4-4a41-b977-87595cc90757-utilities\") pod \"12c22bf1-09d4-4a41-b977-87595cc90757\" (UID: \"12c22bf1-09d4-4a41-b977-87595cc90757\") " Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.586276 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/984817e5-ea2b-4a58-a082-0e83447b116a-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 
00:11:10.586305 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hdh8w\" (UniqueName: \"kubernetes.io/projected/984817e5-ea2b-4a58-a082-0e83447b116a-kube-api-access-hdh8w\") on node \"crc\" DevicePath \"\"" Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.586328 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/984817e5-ea2b-4a58-a082-0e83447b116a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.587489 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12c22bf1-09d4-4a41-b977-87595cc90757-utilities" (OuterVolumeSpecName: "utilities") pod "12c22bf1-09d4-4a41-b977-87595cc90757" (UID: "12c22bf1-09d4-4a41-b977-87595cc90757"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.588755 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12c22bf1-09d4-4a41-b977-87595cc90757-kube-api-access-8jqvm" (OuterVolumeSpecName: "kube-api-access-8jqvm") pod "12c22bf1-09d4-4a41-b977-87595cc90757" (UID: "12c22bf1-09d4-4a41-b977-87595cc90757"). InnerVolumeSpecName "kube-api-access-8jqvm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.608871 4829 scope.go:117] "RemoveContainer" containerID="16dfca33f972a384bd379840456043faff05d7d836d95f695f103d5c73503e28" Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.627388 4829 scope.go:117] "RemoveContainer" containerID="353b6660e659bf46a8cd0fc9f4270f979d46852432b7f3e12135e1fdec6aa73f" Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.643134 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12c22bf1-09d4-4a41-b977-87595cc90757-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "12c22bf1-09d4-4a41-b977-87595cc90757" (UID: "12c22bf1-09d4-4a41-b977-87595cc90757"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.687783 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12c22bf1-09d4-4a41-b977-87595cc90757-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.687816 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12c22bf1-09d4-4a41-b977-87595cc90757-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.688318 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jqvm\" (UniqueName: \"kubernetes.io/projected/12c22bf1-09d4-4a41-b977-87595cc90757-kube-api-access-8jqvm\") on node \"crc\" DevicePath \"\"" Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.894807 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-z7h8p"] Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.901729 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-z7h8p"] Jan 22 00:11:10 crc kubenswrapper[4829]: I0122 00:11:10.992821 4829 scope.go:117] "RemoveContainer" containerID="96c8e36e33e9718513f4d9226614a1f3857b3abdfd4c4b1b0a47cd8647a803b4" Jan 22 00:11:11 crc kubenswrapper[4829]: I0122 00:11:11.008326 4829 scope.go:117] "RemoveContainer" containerID="e24fe241c1b9585af07bb8be5ffb27009b6c427fdfdf7e8b683d7c82333c741d" Jan 22 00:11:11 crc kubenswrapper[4829]: I0122 00:11:11.278124 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tnwgc" Jan 22 00:11:11 crc kubenswrapper[4829]: I0122 00:11:11.396400 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vmvx2\" (UniqueName: \"kubernetes.io/projected/61af08cf-4127-439f-abd2-35fb5ad7dd2f-kube-api-access-vmvx2\") pod \"61af08cf-4127-439f-abd2-35fb5ad7dd2f\" (UID: \"61af08cf-4127-439f-abd2-35fb5ad7dd2f\") " Jan 22 00:11:11 crc kubenswrapper[4829]: I0122 00:11:11.396618 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61af08cf-4127-439f-abd2-35fb5ad7dd2f-utilities\") pod \"61af08cf-4127-439f-abd2-35fb5ad7dd2f\" (UID: \"61af08cf-4127-439f-abd2-35fb5ad7dd2f\") " Jan 22 00:11:11 crc kubenswrapper[4829]: I0122 00:11:11.396748 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61af08cf-4127-439f-abd2-35fb5ad7dd2f-catalog-content\") pod \"61af08cf-4127-439f-abd2-35fb5ad7dd2f\" (UID: \"61af08cf-4127-439f-abd2-35fb5ad7dd2f\") " Jan 22 00:11:11 crc kubenswrapper[4829]: I0122 00:11:11.397671 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61af08cf-4127-439f-abd2-35fb5ad7dd2f-utilities" (OuterVolumeSpecName: "utilities") pod "61af08cf-4127-439f-abd2-35fb5ad7dd2f" (UID: "61af08cf-4127-439f-abd2-35fb5ad7dd2f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:11:11 crc kubenswrapper[4829]: I0122 00:11:11.399177 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61af08cf-4127-439f-abd2-35fb5ad7dd2f-kube-api-access-vmvx2" (OuterVolumeSpecName: "kube-api-access-vmvx2") pod "61af08cf-4127-439f-abd2-35fb5ad7dd2f" (UID: "61af08cf-4127-439f-abd2-35fb5ad7dd2f"). InnerVolumeSpecName "kube-api-access-vmvx2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:11:11 crc kubenswrapper[4829]: I0122 00:11:11.416738 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61af08cf-4127-439f-abd2-35fb5ad7dd2f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "61af08cf-4127-439f-abd2-35fb5ad7dd2f" (UID: "61af08cf-4127-439f-abd2-35fb5ad7dd2f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:11:11 crc kubenswrapper[4829]: I0122 00:11:11.498503 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61af08cf-4127-439f-abd2-35fb5ad7dd2f-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 00:11:11 crc kubenswrapper[4829]: I0122 00:11:11.498573 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61af08cf-4127-439f-abd2-35fb5ad7dd2f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 00:11:11 crc kubenswrapper[4829]: I0122 00:11:11.498587 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vmvx2\" (UniqueName: \"kubernetes.io/projected/61af08cf-4127-439f-abd2-35fb5ad7dd2f-kube-api-access-vmvx2\") on node \"crc\" DevicePath \"\"" Jan 22 00:11:11 crc kubenswrapper[4829]: I0122 00:11:11.554894 4829 generic.go:334] "Generic (PLEG): container finished" podID="61af08cf-4127-439f-abd2-35fb5ad7dd2f" containerID="49b49a50d77971c02b4578b97c68c9f6151a9793e84943a1878fafe7e6f8dbf4" exitCode=0 Jan 22 00:11:11 crc kubenswrapper[4829]: I0122 00:11:11.554942 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tnwgc" event={"ID":"61af08cf-4127-439f-abd2-35fb5ad7dd2f","Type":"ContainerDied","Data":"49b49a50d77971c02b4578b97c68c9f6151a9793e84943a1878fafe7e6f8dbf4"} Jan 22 00:11:11 crc kubenswrapper[4829]: I0122 00:11:11.554980 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tnwgc" event={"ID":"61af08cf-4127-439f-abd2-35fb5ad7dd2f","Type":"ContainerDied","Data":"5eaa81f52d3a19262a6195ef934f9d089f66898812524fa8bf7b7aafdecdc606"} Jan 22 00:11:11 crc kubenswrapper[4829]: I0122 00:11:11.554979 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tnwgc" Jan 22 00:11:11 crc kubenswrapper[4829]: I0122 00:11:11.555000 4829 scope.go:117] "RemoveContainer" containerID="49b49a50d77971c02b4578b97c68c9f6151a9793e84943a1878fafe7e6f8dbf4" Jan 22 00:11:11 crc kubenswrapper[4829]: I0122 00:11:11.569325 4829 scope.go:117] "RemoveContainer" containerID="cbaf96230582d871aca28ca0543f47fe16ef8e8194116100845e3dc0dcb81c5a" Jan 22 00:11:11 crc kubenswrapper[4829]: I0122 00:11:11.589901 4829 scope.go:117] "RemoveContainer" containerID="719a20bba54cfa2e89db7ded5aeb1543b439b1830c2a8e1b7cb330c2b03d1030" Jan 22 00:11:11 crc kubenswrapper[4829]: I0122 00:11:11.610271 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tnwgc"] Jan 22 00:11:11 crc kubenswrapper[4829]: I0122 00:11:11.613943 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tnwgc"] Jan 22 00:11:11 crc kubenswrapper[4829]: I0122 00:11:11.624234 4829 scope.go:117] "RemoveContainer" containerID="49b49a50d77971c02b4578b97c68c9f6151a9793e84943a1878fafe7e6f8dbf4" Jan 22 00:11:11 crc kubenswrapper[4829]: E0122 00:11:11.624742 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49b49a50d77971c02b4578b97c68c9f6151a9793e84943a1878fafe7e6f8dbf4\": container with ID starting with 49b49a50d77971c02b4578b97c68c9f6151a9793e84943a1878fafe7e6f8dbf4 not found: ID does not exist" containerID="49b49a50d77971c02b4578b97c68c9f6151a9793e84943a1878fafe7e6f8dbf4" Jan 22 00:11:11 crc kubenswrapper[4829]: I0122 00:11:11.624811 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49b49a50d77971c02b4578b97c68c9f6151a9793e84943a1878fafe7e6f8dbf4"} err="failed to get container status \"49b49a50d77971c02b4578b97c68c9f6151a9793e84943a1878fafe7e6f8dbf4\": rpc error: code = NotFound desc = could not find container \"49b49a50d77971c02b4578b97c68c9f6151a9793e84943a1878fafe7e6f8dbf4\": container with ID starting with 49b49a50d77971c02b4578b97c68c9f6151a9793e84943a1878fafe7e6f8dbf4 not found: ID does not exist" Jan 22 00:11:11 crc kubenswrapper[4829]: I0122 00:11:11.624854 4829 scope.go:117] "RemoveContainer" containerID="cbaf96230582d871aca28ca0543f47fe16ef8e8194116100845e3dc0dcb81c5a" Jan 22 00:11:11 crc kubenswrapper[4829]: E0122 00:11:11.625321 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbaf96230582d871aca28ca0543f47fe16ef8e8194116100845e3dc0dcb81c5a\": container with ID starting with cbaf96230582d871aca28ca0543f47fe16ef8e8194116100845e3dc0dcb81c5a not found: ID does not exist" containerID="cbaf96230582d871aca28ca0543f47fe16ef8e8194116100845e3dc0dcb81c5a" Jan 22 00:11:11 crc kubenswrapper[4829]: I0122 00:11:11.625368 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbaf96230582d871aca28ca0543f47fe16ef8e8194116100845e3dc0dcb81c5a"} err="failed to get container status \"cbaf96230582d871aca28ca0543f47fe16ef8e8194116100845e3dc0dcb81c5a\": rpc error: code = NotFound desc = could not find container \"cbaf96230582d871aca28ca0543f47fe16ef8e8194116100845e3dc0dcb81c5a\": container with ID starting with cbaf96230582d871aca28ca0543f47fe16ef8e8194116100845e3dc0dcb81c5a not found: ID does not exist" Jan 22 00:11:11 crc kubenswrapper[4829]: I0122 00:11:11.625397 4829 scope.go:117] "RemoveContainer" 
containerID="719a20bba54cfa2e89db7ded5aeb1543b439b1830c2a8e1b7cb330c2b03d1030" Jan 22 00:11:11 crc kubenswrapper[4829]: E0122 00:11:11.625740 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"719a20bba54cfa2e89db7ded5aeb1543b439b1830c2a8e1b7cb330c2b03d1030\": container with ID starting with 719a20bba54cfa2e89db7ded5aeb1543b439b1830c2a8e1b7cb330c2b03d1030 not found: ID does not exist" containerID="719a20bba54cfa2e89db7ded5aeb1543b439b1830c2a8e1b7cb330c2b03d1030" Jan 22 00:11:11 crc kubenswrapper[4829]: I0122 00:11:11.625780 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"719a20bba54cfa2e89db7ded5aeb1543b439b1830c2a8e1b7cb330c2b03d1030"} err="failed to get container status \"719a20bba54cfa2e89db7ded5aeb1543b439b1830c2a8e1b7cb330c2b03d1030\": rpc error: code = NotFound desc = could not find container \"719a20bba54cfa2e89db7ded5aeb1543b439b1830c2a8e1b7cb330c2b03d1030\": container with ID starting with 719a20bba54cfa2e89db7ded5aeb1543b439b1830c2a8e1b7cb330c2b03d1030 not found: ID does not exist" Jan 22 00:11:12 crc kubenswrapper[4829]: I0122 00:11:12.560814 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12c22bf1-09d4-4a41-b977-87595cc90757" path="/var/lib/kubelet/pods/12c22bf1-09d4-4a41-b977-87595cc90757/volumes" Jan 22 00:11:12 crc kubenswrapper[4829]: I0122 00:11:12.562431 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61af08cf-4127-439f-abd2-35fb5ad7dd2f" path="/var/lib/kubelet/pods/61af08cf-4127-439f-abd2-35fb5ad7dd2f/volumes" Jan 22 00:11:12 crc kubenswrapper[4829]: I0122 00:11:12.563295 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="984817e5-ea2b-4a58-a082-0e83447b116a" path="/var/lib/kubelet/pods/984817e5-ea2b-4a58-a082-0e83447b116a/volumes" Jan 22 00:11:12 crc kubenswrapper[4829]: I0122 00:11:12.590607 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-njstk"] Jan 22 00:11:12 crc kubenswrapper[4829]: I0122 00:11:12.590999 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-njstk" podUID="df877a86-a205-463a-a771-1dee5d9750f8" containerName="registry-server" containerID="cri-o://727d5a1857f2be99f217ad4f7caa2bc9a814bf1bd82e1a224fe875abdc3ed8e4" gracePeriod=2 Jan 22 00:11:13 crc kubenswrapper[4829]: I0122 00:11:13.488615 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-njstk" Jan 22 00:11:13 crc kubenswrapper[4829]: I0122 00:11:13.574657 4829 generic.go:334] "Generic (PLEG): container finished" podID="df877a86-a205-463a-a771-1dee5d9750f8" containerID="727d5a1857f2be99f217ad4f7caa2bc9a814bf1bd82e1a224fe875abdc3ed8e4" exitCode=0 Jan 22 00:11:13 crc kubenswrapper[4829]: I0122 00:11:13.574746 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-njstk" event={"ID":"df877a86-a205-463a-a771-1dee5d9750f8","Type":"ContainerDied","Data":"727d5a1857f2be99f217ad4f7caa2bc9a814bf1bd82e1a224fe875abdc3ed8e4"} Jan 22 00:11:13 crc kubenswrapper[4829]: I0122 00:11:13.574789 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-njstk" Jan 22 00:11:13 crc kubenswrapper[4829]: I0122 00:11:13.574832 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-njstk" event={"ID":"df877a86-a205-463a-a771-1dee5d9750f8","Type":"ContainerDied","Data":"3e33332956cc6fb8c82732ffc0a32ab0568418a073381bae06b72c3636cc2b92"} Jan 22 00:11:13 crc kubenswrapper[4829]: I0122 00:11:13.574863 4829 scope.go:117] "RemoveContainer" containerID="727d5a1857f2be99f217ad4f7caa2bc9a814bf1bd82e1a224fe875abdc3ed8e4" Jan 22 00:11:13 crc kubenswrapper[4829]: I0122 00:11:13.594396 4829 scope.go:117] "RemoveContainer" containerID="5cd088c639af2800665a5a55bf22d604d428dcacea09adec7487682906340801" Jan 22 00:11:13 crc kubenswrapper[4829]: I0122 00:11:13.611650 4829 scope.go:117] "RemoveContainer" containerID="efbdb14d14212263f32a51c762cbae85183b8694ccdde2c8a2117250dd0ac445" Jan 22 00:11:13 crc kubenswrapper[4829]: I0122 00:11:13.628425 4829 scope.go:117] "RemoveContainer" containerID="727d5a1857f2be99f217ad4f7caa2bc9a814bf1bd82e1a224fe875abdc3ed8e4" Jan 22 00:11:13 crc kubenswrapper[4829]: E0122 00:11:13.629007 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"727d5a1857f2be99f217ad4f7caa2bc9a814bf1bd82e1a224fe875abdc3ed8e4\": container with ID starting with 727d5a1857f2be99f217ad4f7caa2bc9a814bf1bd82e1a224fe875abdc3ed8e4 not found: ID does not exist" containerID="727d5a1857f2be99f217ad4f7caa2bc9a814bf1bd82e1a224fe875abdc3ed8e4" Jan 22 00:11:13 crc kubenswrapper[4829]: I0122 00:11:13.629049 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"727d5a1857f2be99f217ad4f7caa2bc9a814bf1bd82e1a224fe875abdc3ed8e4"} err="failed to get container status \"727d5a1857f2be99f217ad4f7caa2bc9a814bf1bd82e1a224fe875abdc3ed8e4\": rpc error: code = NotFound desc = could not find container \"727d5a1857f2be99f217ad4f7caa2bc9a814bf1bd82e1a224fe875abdc3ed8e4\": container with ID starting with 727d5a1857f2be99f217ad4f7caa2bc9a814bf1bd82e1a224fe875abdc3ed8e4 not found: ID does not exist" Jan 22 00:11:13 crc kubenswrapper[4829]: I0122 00:11:13.629073 4829 scope.go:117] "RemoveContainer" containerID="5cd088c639af2800665a5a55bf22d604d428dcacea09adec7487682906340801" Jan 22 00:11:13 crc kubenswrapper[4829]: E0122 00:11:13.629491 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5cd088c639af2800665a5a55bf22d604d428dcacea09adec7487682906340801\": container with ID starting with 5cd088c639af2800665a5a55bf22d604d428dcacea09adec7487682906340801 not found: ID does not exist" containerID="5cd088c639af2800665a5a55bf22d604d428dcacea09adec7487682906340801" Jan 22 00:11:13 crc kubenswrapper[4829]: I0122 00:11:13.629519 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5cd088c639af2800665a5a55bf22d604d428dcacea09adec7487682906340801"} err="failed to get container status \"5cd088c639af2800665a5a55bf22d604d428dcacea09adec7487682906340801\": rpc error: code = NotFound desc = could not find container \"5cd088c639af2800665a5a55bf22d604d428dcacea09adec7487682906340801\": container with ID starting with 5cd088c639af2800665a5a55bf22d604d428dcacea09adec7487682906340801 not found: ID does not exist" Jan 22 00:11:13 crc kubenswrapper[4829]: I0122 00:11:13.629534 4829 scope.go:117] "RemoveContainer" 
containerID="efbdb14d14212263f32a51c762cbae85183b8694ccdde2c8a2117250dd0ac445" Jan 22 00:11:13 crc kubenswrapper[4829]: E0122 00:11:13.629820 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"efbdb14d14212263f32a51c762cbae85183b8694ccdde2c8a2117250dd0ac445\": container with ID starting with efbdb14d14212263f32a51c762cbae85183b8694ccdde2c8a2117250dd0ac445 not found: ID does not exist" containerID="efbdb14d14212263f32a51c762cbae85183b8694ccdde2c8a2117250dd0ac445" Jan 22 00:11:13 crc kubenswrapper[4829]: I0122 00:11:13.629850 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efbdb14d14212263f32a51c762cbae85183b8694ccdde2c8a2117250dd0ac445"} err="failed to get container status \"efbdb14d14212263f32a51c762cbae85183b8694ccdde2c8a2117250dd0ac445\": rpc error: code = NotFound desc = could not find container \"efbdb14d14212263f32a51c762cbae85183b8694ccdde2c8a2117250dd0ac445\": container with ID starting with efbdb14d14212263f32a51c762cbae85183b8694ccdde2c8a2117250dd0ac445 not found: ID does not exist" Jan 22 00:11:13 crc kubenswrapper[4829]: I0122 00:11:13.632348 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jmvjp\" (UniqueName: \"kubernetes.io/projected/df877a86-a205-463a-a771-1dee5d9750f8-kube-api-access-jmvjp\") pod \"df877a86-a205-463a-a771-1dee5d9750f8\" (UID: \"df877a86-a205-463a-a771-1dee5d9750f8\") " Jan 22 00:11:13 crc kubenswrapper[4829]: I0122 00:11:13.632391 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df877a86-a205-463a-a771-1dee5d9750f8-catalog-content\") pod \"df877a86-a205-463a-a771-1dee5d9750f8\" (UID: \"df877a86-a205-463a-a771-1dee5d9750f8\") " Jan 22 00:11:13 crc kubenswrapper[4829]: I0122 00:11:13.632473 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df877a86-a205-463a-a771-1dee5d9750f8-utilities\") pod \"df877a86-a205-463a-a771-1dee5d9750f8\" (UID: \"df877a86-a205-463a-a771-1dee5d9750f8\") " Jan 22 00:11:13 crc kubenswrapper[4829]: I0122 00:11:13.633486 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df877a86-a205-463a-a771-1dee5d9750f8-utilities" (OuterVolumeSpecName: "utilities") pod "df877a86-a205-463a-a771-1dee5d9750f8" (UID: "df877a86-a205-463a-a771-1dee5d9750f8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:11:13 crc kubenswrapper[4829]: I0122 00:11:13.638339 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df877a86-a205-463a-a771-1dee5d9750f8-kube-api-access-jmvjp" (OuterVolumeSpecName: "kube-api-access-jmvjp") pod "df877a86-a205-463a-a771-1dee5d9750f8" (UID: "df877a86-a205-463a-a771-1dee5d9750f8"). InnerVolumeSpecName "kube-api-access-jmvjp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:11:13 crc kubenswrapper[4829]: I0122 00:11:13.734281 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df877a86-a205-463a-a771-1dee5d9750f8-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 00:11:13 crc kubenswrapper[4829]: I0122 00:11:13.734402 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jmvjp\" (UniqueName: \"kubernetes.io/projected/df877a86-a205-463a-a771-1dee5d9750f8-kube-api-access-jmvjp\") on node \"crc\" DevicePath \"\"" Jan 22 00:11:13 crc kubenswrapper[4829]: I0122 00:11:13.755185 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df877a86-a205-463a-a771-1dee5d9750f8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "df877a86-a205-463a-a771-1dee5d9750f8" (UID: "df877a86-a205-463a-a771-1dee5d9750f8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:11:13 crc kubenswrapper[4829]: I0122 00:11:13.835205 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df877a86-a205-463a-a771-1dee5d9750f8-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 00:11:13 crc kubenswrapper[4829]: I0122 00:11:13.906477 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-njstk"] Jan 22 00:11:13 crc kubenswrapper[4829]: I0122 00:11:13.909799 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-njstk"] Jan 22 00:11:14 crc kubenswrapper[4829]: I0122 00:11:14.567246 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df877a86-a205-463a-a771-1dee5d9750f8" path="/var/lib/kubelet/pods/df877a86-a205-463a-a771-1dee5d9750f8/volumes" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.121116 4829 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 22 00:11:15 crc kubenswrapper[4829]: E0122 00:11:15.123379 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12c22bf1-09d4-4a41-b977-87595cc90757" containerName="registry-server" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.123414 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="12c22bf1-09d4-4a41-b977-87595cc90757" containerName="registry-server" Jan 22 00:11:15 crc kubenswrapper[4829]: E0122 00:11:15.123432 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12c22bf1-09d4-4a41-b977-87595cc90757" containerName="extract-utilities" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.123441 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="12c22bf1-09d4-4a41-b977-87595cc90757" containerName="extract-utilities" Jan 22 00:11:15 crc kubenswrapper[4829]: E0122 00:11:15.123454 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df877a86-a205-463a-a771-1dee5d9750f8" containerName="extract-utilities" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.123462 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="df877a86-a205-463a-a771-1dee5d9750f8" containerName="extract-utilities" Jan 22 00:11:15 crc kubenswrapper[4829]: E0122 00:11:15.123471 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12c22bf1-09d4-4a41-b977-87595cc90757" containerName="extract-content" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.123479 
4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="12c22bf1-09d4-4a41-b977-87595cc90757" containerName="extract-content" Jan 22 00:11:15 crc kubenswrapper[4829]: E0122 00:11:15.123490 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="984817e5-ea2b-4a58-a082-0e83447b116a" containerName="extract-utilities" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.123496 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="984817e5-ea2b-4a58-a082-0e83447b116a" containerName="extract-utilities" Jan 22 00:11:15 crc kubenswrapper[4829]: E0122 00:11:15.123506 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61af08cf-4127-439f-abd2-35fb5ad7dd2f" containerName="extract-utilities" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.123512 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="61af08cf-4127-439f-abd2-35fb5ad7dd2f" containerName="extract-utilities" Jan 22 00:11:15 crc kubenswrapper[4829]: E0122 00:11:15.123525 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="984817e5-ea2b-4a58-a082-0e83447b116a" containerName="extract-content" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.123560 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="984817e5-ea2b-4a58-a082-0e83447b116a" containerName="extract-content" Jan 22 00:11:15 crc kubenswrapper[4829]: E0122 00:11:15.123570 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df877a86-a205-463a-a771-1dee5d9750f8" containerName="registry-server" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.123580 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="df877a86-a205-463a-a771-1dee5d9750f8" containerName="registry-server" Jan 22 00:11:15 crc kubenswrapper[4829]: E0122 00:11:15.123591 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61af08cf-4127-439f-abd2-35fb5ad7dd2f" containerName="registry-server" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.123599 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="61af08cf-4127-439f-abd2-35fb5ad7dd2f" containerName="registry-server" Jan 22 00:11:15 crc kubenswrapper[4829]: E0122 00:11:15.123614 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61af08cf-4127-439f-abd2-35fb5ad7dd2f" containerName="extract-content" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.123621 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="61af08cf-4127-439f-abd2-35fb5ad7dd2f" containerName="extract-content" Jan 22 00:11:15 crc kubenswrapper[4829]: E0122 00:11:15.123630 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="984817e5-ea2b-4a58-a082-0e83447b116a" containerName="registry-server" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.123638 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="984817e5-ea2b-4a58-a082-0e83447b116a" containerName="registry-server" Jan 22 00:11:15 crc kubenswrapper[4829]: E0122 00:11:15.123652 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df877a86-a205-463a-a771-1dee5d9750f8" containerName="extract-content" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.123658 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="df877a86-a205-463a-a771-1dee5d9750f8" containerName="extract-content" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.123794 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="df877a86-a205-463a-a771-1dee5d9750f8" containerName="registry-server" Jan 22 00:11:15 crc 
kubenswrapper[4829]: I0122 00:11:15.123809 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="61af08cf-4127-439f-abd2-35fb5ad7dd2f" containerName="registry-server" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.123822 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="12c22bf1-09d4-4a41-b977-87595cc90757" containerName="registry-server" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.123836 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="984817e5-ea2b-4a58-a082-0e83447b116a" containerName="registry-server" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.124453 4829 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.124496 4829 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.124667 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 00:11:15 crc kubenswrapper[4829]: E0122 00:11:15.124982 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.125017 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 22 00:11:15 crc kubenswrapper[4829]: E0122 00:11:15.125031 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.125039 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 22 00:11:15 crc kubenswrapper[4829]: E0122 00:11:15.125053 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.125061 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 22 00:11:15 crc kubenswrapper[4829]: E0122 00:11:15.125072 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.125080 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 22 00:11:15 crc kubenswrapper[4829]: E0122 00:11:15.125093 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.125102 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 22 00:11:15 crc kubenswrapper[4829]: E0122 00:11:15.125113 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.125122 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-check-endpoints" Jan 22 00:11:15 crc kubenswrapper[4829]: E0122 00:11:15.125136 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.125145 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.125234 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3" gracePeriod=15 Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.125225 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515" gracePeriod=15 Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.125304 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.125319 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.125326 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.125342 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.125353 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.125371 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74" gracePeriod=15 Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.125386 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca" gracePeriod=15 Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.125699 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.125704 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" 
containerID="cri-o://78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81" gracePeriod=15 Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.128893 4829 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.175506 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.256380 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.256441 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.256507 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.256553 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.256616 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.256647 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.256765 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.256990 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.358134 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.358176 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.358209 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.358235 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.358255 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.358309 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.358312 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.358366 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.358329 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.358406 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.358447 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.358448 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.358513 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.358513 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.358561 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.358453 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.469427 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 00:11:15 crc kubenswrapper[4829]: W0122 00:11:15.493144 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-2122f24e6c838685e0cca384eede78912afec056d3b75dc5ff7e444beec14e0b WatchSource:0}: Error finding container 2122f24e6c838685e0cca384eede78912afec056d3b75dc5ff7e444beec14e0b: Status 404 returned error can't find the container with id 2122f24e6c838685e0cca384eede78912afec056d3b75dc5ff7e444beec14e0b Jan 22 00:11:15 crc kubenswrapper[4829]: E0122 00:11:15.496967 4829 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.129.56.197:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188ce51f2029a85c openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-22 00:11:15.496110172 +0000 UTC m=+253.532352084,LastTimestamp:2026-01-22 00:11:15.496110172 +0000 UTC m=+253.532352084,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.589350 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"2122f24e6c838685e0cca384eede78912afec056d3b75dc5ff7e444beec14e0b"} Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.593138 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.595743 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.596902 4829 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3" exitCode=0 Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.596927 4829 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca" exitCode=0 Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.596937 4829 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81" exitCode=0 Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.596947 4829 generic.go:334] "Generic (PLEG): container finished" 
podID="f4b27818a5e8e43d0dc095d08835c792" containerID="af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74" exitCode=2 Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.597000 4829 scope.go:117] "RemoveContainer" containerID="24c61825f0965cd1f313d6073f5bfea88d00cdaae45e2e471f637e79912e7bf2" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.600128 4829 generic.go:334] "Generic (PLEG): container finished" podID="b6d802df-3827-44ed-96e9-d5013b03aa73" containerID="968744718eb7ac9bce09af56d4684ee56bdca4bb5627eb76b1b9166518d045a5" exitCode=0 Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.600195 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"b6d802df-3827-44ed-96e9-d5013b03aa73","Type":"ContainerDied","Data":"968744718eb7ac9bce09af56d4684ee56bdca4bb5627eb76b1b9166518d045a5"} Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.601302 4829 status_manager.go:851] "Failed to get status for pod" podUID="b6d802df-3827-44ed-96e9-d5013b03aa73" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:15 crc kubenswrapper[4829]: I0122 00:11:15.602020 4829 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:15 crc kubenswrapper[4829]: E0122 00:11:15.932568 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:11:15Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:11:15Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:11:15Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:11:15Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:7aded5259ce52ede3b2e9bda03c7207ce72911f78e1eb950163551f36bfa7a57\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8e868fbd200736e653e7def35b5a401c0f4ddf51e08d9570b690782d41d30cb2\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1671128841},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:aad5e438ec868272540a84dfc53b266c8a08267bec7a7617871dddeb1511dcb2\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:dd1e95af8b913ea8f010fa96cba36f2e7e5b1edfbf758c69b8c9eeb88c6911ea\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1202744046},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:2b72e40c5d5b36b681f40c16ebf3dcac6520ed0c79f174ba87f673ab7afd209a\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:d83ee77ad07e06451a84205ac4c85c69e912a1c975e1a8a95095d79218028dce\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1178956511},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1f907bbb9feda871911fe6104a05039ba4876ca82e26d41398008385f8a7656b\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:cffa3fc70ac997825130e3768ebb1a24956f427e18870f87ef0654513b3cd657\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1168433908},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\
\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:15 crc kubenswrapper[4829]: E0122 00:11:15.933283 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get 
\"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:15 crc kubenswrapper[4829]: E0122 00:11:15.933580 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:15 crc kubenswrapper[4829]: E0122 00:11:15.933885 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:15 crc kubenswrapper[4829]: E0122 00:11:15.934181 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:15 crc kubenswrapper[4829]: E0122 00:11:15.934209 4829 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 00:11:16 crc kubenswrapper[4829]: I0122 00:11:16.609197 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"c3cc427231d6ae9c37d13aef8c6e43c473fe7a540b4ca41c10a0883b5095fccb"} Jan 22 00:11:16 crc kubenswrapper[4829]: I0122 00:11:16.610127 4829 status_manager.go:851] "Failed to get status for pod" podUID="b6d802df-3827-44ed-96e9-d5013b03aa73" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:16 crc kubenswrapper[4829]: I0122 00:11:16.610569 4829 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:16 crc kubenswrapper[4829]: I0122 00:11:16.614624 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 22 00:11:16 crc kubenswrapper[4829]: I0122 00:11:16.993976 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 22 00:11:16 crc kubenswrapper[4829]: I0122 00:11:16.994951 4829 status_manager.go:851] "Failed to get status for pod" podUID="b6d802df-3827-44ed-96e9-d5013b03aa73" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:16 crc kubenswrapper[4829]: I0122 00:11:16.995375 4829 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.085286 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b6d802df-3827-44ed-96e9-d5013b03aa73-kube-api-access\") pod \"b6d802df-3827-44ed-96e9-d5013b03aa73\" (UID: \"b6d802df-3827-44ed-96e9-d5013b03aa73\") " Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.085360 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b6d802df-3827-44ed-96e9-d5013b03aa73-kubelet-dir\") pod \"b6d802df-3827-44ed-96e9-d5013b03aa73\" (UID: \"b6d802df-3827-44ed-96e9-d5013b03aa73\") " Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.085395 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/b6d802df-3827-44ed-96e9-d5013b03aa73-var-lock\") pod \"b6d802df-3827-44ed-96e9-d5013b03aa73\" (UID: \"b6d802df-3827-44ed-96e9-d5013b03aa73\") " Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.085866 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b6d802df-3827-44ed-96e9-d5013b03aa73-var-lock" (OuterVolumeSpecName: "var-lock") pod "b6d802df-3827-44ed-96e9-d5013b03aa73" (UID: "b6d802df-3827-44ed-96e9-d5013b03aa73"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.086760 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b6d802df-3827-44ed-96e9-d5013b03aa73-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "b6d802df-3827-44ed-96e9-d5013b03aa73" (UID: "b6d802df-3827-44ed-96e9-d5013b03aa73"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.096020 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6d802df-3827-44ed-96e9-d5013b03aa73-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "b6d802df-3827-44ed-96e9-d5013b03aa73" (UID: "b6d802df-3827-44ed-96e9-d5013b03aa73"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.187787 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b6d802df-3827-44ed-96e9-d5013b03aa73-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.187818 4829 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b6d802df-3827-44ed-96e9-d5013b03aa73-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.187827 4829 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/b6d802df-3827-44ed-96e9-d5013b03aa73-var-lock\") on node \"crc\" DevicePath \"\"" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.478100 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.479387 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.480143 4829 status_manager.go:851] "Failed to get status for pod" podUID="b6d802df-3827-44ed-96e9-d5013b03aa73" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.480531 4829 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.480899 4829 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.593215 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.593306 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.593358 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.593829 4829 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.593837 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.593921 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.622815 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"b6d802df-3827-44ed-96e9-d5013b03aa73","Type":"ContainerDied","Data":"9f3fb7e8130315f079129fbb277f1b202ee58cbe2e302454ad50de359edeba20"} Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.622875 4829 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9f3fb7e8130315f079129fbb277f1b202ee58cbe2e302454ad50de359edeba20" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.622895 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.626120 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.626909 4829 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515" exitCode=0 Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.627035 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.627067 4829 scope.go:117] "RemoveContainer" containerID="885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.647616 4829 status_manager.go:851] "Failed to get status for pod" podUID="b6d802df-3827-44ed-96e9-d5013b03aa73" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.648150 4829 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.648790 4829 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.649444 4829 status_manager.go:851] "Failed to get status for pod" podUID="b6d802df-3827-44ed-96e9-d5013b03aa73" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.649761 4829 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.650059 4829 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.656495 4829 scope.go:117] "RemoveContainer" containerID="dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.670837 4829 scope.go:117] "RemoveContainer" containerID="78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.683930 4829 scope.go:117] "RemoveContainer" containerID="af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.694782 4829 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.694809 4829 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" 
(UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.694819 4829 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.697219 4829 scope.go:117] "RemoveContainer" containerID="1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.711175 4829 scope.go:117] "RemoveContainer" containerID="ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.727505 4829 scope.go:117] "RemoveContainer" containerID="885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3" Jan 22 00:11:17 crc kubenswrapper[4829]: E0122 00:11:17.728142 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\": container with ID starting with 885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3 not found: ID does not exist" containerID="885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.728203 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3"} err="failed to get container status \"885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\": rpc error: code = NotFound desc = could not find container \"885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3\": container with ID starting with 885edc80ee39315160b432391c13c811a53ce5792560ec4d2b5aacfccfc1d2c3 not found: ID does not exist" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.728238 4829 scope.go:117] "RemoveContainer" containerID="dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca" Jan 22 00:11:17 crc kubenswrapper[4829]: E0122 00:11:17.728611 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\": container with ID starting with dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca not found: ID does not exist" containerID="dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.728636 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca"} err="failed to get container status \"dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\": rpc error: code = NotFound desc = could not find container \"dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca\": container with ID starting with dde9f8b4c227fa389852b69b398c481938b55dd86183e814d42198b3113931ca not found: ID does not exist" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.728651 4829 scope.go:117] "RemoveContainer" containerID="78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81" Jan 22 00:11:17 crc kubenswrapper[4829]: E0122 00:11:17.729246 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = 
could not find container \"78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\": container with ID starting with 78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81 not found: ID does not exist" containerID="78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.729271 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81"} err="failed to get container status \"78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\": rpc error: code = NotFound desc = could not find container \"78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81\": container with ID starting with 78276af38a6c705dfc8a6238d670d514894c3dcf6d82df5301f43136cfe41d81 not found: ID does not exist" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.729286 4829 scope.go:117] "RemoveContainer" containerID="af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74" Jan 22 00:11:17 crc kubenswrapper[4829]: E0122 00:11:17.729772 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\": container with ID starting with af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74 not found: ID does not exist" containerID="af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.729815 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74"} err="failed to get container status \"af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\": rpc error: code = NotFound desc = could not find container \"af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74\": container with ID starting with af9b39e15624dec760fdb21c8537f611462c678e8be0496ed69dedbe46f8ad74 not found: ID does not exist" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.729830 4829 scope.go:117] "RemoveContainer" containerID="1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515" Jan 22 00:11:17 crc kubenswrapper[4829]: E0122 00:11:17.730071 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\": container with ID starting with 1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515 not found: ID does not exist" containerID="1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.730093 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515"} err="failed to get container status \"1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\": rpc error: code = NotFound desc = could not find container \"1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515\": container with ID starting with 1b76f9966e9fc04171fccccf7a1cf2c86c95e41c2b2dd76209d9ead5c9803515 not found: ID does not exist" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.730124 4829 scope.go:117] "RemoveContainer" containerID="ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed" Jan 22 
00:11:17 crc kubenswrapper[4829]: E0122 00:11:17.730319 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\": container with ID starting with ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed not found: ID does not exist" containerID="ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed" Jan 22 00:11:17 crc kubenswrapper[4829]: I0122 00:11:17.730342 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed"} err="failed to get container status \"ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\": rpc error: code = NotFound desc = could not find container \"ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed\": container with ID starting with ee9db5274096ab86219b31b459d166d9b0ef8343a57c9d0bea95e72cdd3aaaed not found: ID does not exist" Jan 22 00:11:18 crc kubenswrapper[4829]: E0122 00:11:18.367384 4829 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:18 crc kubenswrapper[4829]: E0122 00:11:18.367675 4829 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:18 crc kubenswrapper[4829]: E0122 00:11:18.367902 4829 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:18 crc kubenswrapper[4829]: E0122 00:11:18.368139 4829 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:18 crc kubenswrapper[4829]: E0122 00:11:18.368370 4829 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:18 crc kubenswrapper[4829]: I0122 00:11:18.368399 4829 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Jan 22 00:11:18 crc kubenswrapper[4829]: E0122 00:11:18.368635 4829 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.197:6443: connect: connection refused" interval="200ms" Jan 22 00:11:18 crc kubenswrapper[4829]: I0122 00:11:18.561465 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Jan 22 00:11:18 crc kubenswrapper[4829]: E0122 00:11:18.569290 4829 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": 
dial tcp 38.129.56.197:6443: connect: connection refused" interval="400ms" Jan 22 00:11:18 crc kubenswrapper[4829]: E0122 00:11:18.971190 4829 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.197:6443: connect: connection refused" interval="800ms" Jan 22 00:11:19 crc kubenswrapper[4829]: E0122 00:11:19.772601 4829 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.197:6443: connect: connection refused" interval="1.6s" Jan 22 00:11:21 crc kubenswrapper[4829]: E0122 00:11:21.373731 4829 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.197:6443: connect: connection refused" interval="3.2s" Jan 22 00:11:21 crc kubenswrapper[4829]: E0122 00:11:21.922099 4829 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.129.56.197:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188ce51f2029a85c openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-22 00:11:15.496110172 +0000 UTC m=+253.532352084,LastTimestamp:2026-01-22 00:11:15.496110172 +0000 UTC m=+253.532352084,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 22 00:11:22 crc kubenswrapper[4829]: I0122 00:11:22.559627 4829 status_manager.go:851] "Failed to get status for pod" podUID="b6d802df-3827-44ed-96e9-d5013b03aa73" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:22 crc kubenswrapper[4829]: I0122 00:11:22.560441 4829 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:24 crc kubenswrapper[4829]: E0122 00:11:24.575258 4829 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.197:6443: connect: connection refused" interval="6.4s" Jan 22 00:11:26 crc kubenswrapper[4829]: E0122 00:11:26.142663 4829 kubelet_node_status.go:585] "Error updating node status, will retry" 
err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:11:26Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:11:26Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:11:26Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T00:11:26Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:7aded5259ce52ede3b2e9bda03c7207ce72911f78e1eb950163551f36bfa7a57\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8e868fbd200736e653e7def35b5a401c0f4ddf51e08d9570b690782d41d30cb2\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1671128841},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:aad5e438ec868272540a84dfc53b266c8a08267bec7a7617871dddeb1511dcb2\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:dd1e95af8b913ea8f010fa96cba36f2e7e5b1edfbf758c69b8c9eeb88c6911ea\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1202744046},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:2b72e40c5d5b36b681f40c16ebf3dcac6520ed0c79f174ba87f673ab7afd209a\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:d83ee77ad07e06451a84205ac4c85c69e912a1c975e1a8a95095d79218028dce\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1178956511},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1f907bbb9feda871911fe6104a05039ba4876ca82e26d41398008385f8a7656b\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:cffa3fc70ac997825130e3768ebb1a24956f427e18870f87ef0654513b3cd657\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1168433908},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\
"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:26 crc kubenswrapper[4829]: E0122 00:11:26.145249 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get 
\"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:26 crc kubenswrapper[4829]: E0122 00:11:26.145430 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:26 crc kubenswrapper[4829]: E0122 00:11:26.145593 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:26 crc kubenswrapper[4829]: E0122 00:11:26.145736 4829 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:26 crc kubenswrapper[4829]: E0122 00:11:26.145774 4829 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 00:11:28 crc kubenswrapper[4829]: I0122 00:11:28.552732 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 00:11:28 crc kubenswrapper[4829]: I0122 00:11:28.553954 4829 status_manager.go:851] "Failed to get status for pod" podUID="b6d802df-3827-44ed-96e9-d5013b03aa73" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:28 crc kubenswrapper[4829]: I0122 00:11:28.554554 4829 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:28 crc kubenswrapper[4829]: I0122 00:11:28.571737 4829 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="80eb4c06-f6fe-4036-ba67-20a352c4c72a" Jan 22 00:11:28 crc kubenswrapper[4829]: I0122 00:11:28.571770 4829 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="80eb4c06-f6fe-4036-ba67-20a352c4c72a" Jan 22 00:11:28 crc kubenswrapper[4829]: E0122 00:11:28.572332 4829 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 00:11:28 crc kubenswrapper[4829]: I0122 00:11:28.573030 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 00:11:28 crc kubenswrapper[4829]: W0122 00:11:28.595132 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-fd7f8d9b227da3187edc03d59e459c3de94cc93326819f02566e8de6aa11823a WatchSource:0}: Error finding container fd7f8d9b227da3187edc03d59e459c3de94cc93326819f02566e8de6aa11823a: Status 404 returned error can't find the container with id fd7f8d9b227da3187edc03d59e459c3de94cc93326819f02566e8de6aa11823a Jan 22 00:11:28 crc kubenswrapper[4829]: I0122 00:11:28.691292 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"fd7f8d9b227da3187edc03d59e459c3de94cc93326819f02566e8de6aa11823a"} Jan 22 00:11:28 crc kubenswrapper[4829]: I0122 00:11:28.693790 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 22 00:11:28 crc kubenswrapper[4829]: I0122 00:11:28.693835 4829 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053" exitCode=1 Jan 22 00:11:28 crc kubenswrapper[4829]: I0122 00:11:28.693862 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053"} Jan 22 00:11:28 crc kubenswrapper[4829]: I0122 00:11:28.694407 4829 scope.go:117] "RemoveContainer" containerID="f0eca284669ba854b151259cf608fa94e9ee935d951004d73c0d0db343e00053" Jan 22 00:11:28 crc kubenswrapper[4829]: I0122 00:11:28.695172 4829 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:28 crc kubenswrapper[4829]: I0122 00:11:28.695612 4829 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:28 crc kubenswrapper[4829]: I0122 00:11:28.696345 4829 status_manager.go:851] "Failed to get status for pod" podUID="b6d802df-3827-44ed-96e9-d5013b03aa73" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:29 crc kubenswrapper[4829]: I0122 00:11:29.702650 4829 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="61b1303638430e2b4a819eec367e24bd9248f5993a2d2015d42722ec3a10c678" exitCode=0 Jan 22 00:11:29 crc kubenswrapper[4829]: I0122 00:11:29.702724 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"61b1303638430e2b4a819eec367e24bd9248f5993a2d2015d42722ec3a10c678"} Jan 22 00:11:29 crc kubenswrapper[4829]: I0122 00:11:29.703099 4829 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="80eb4c06-f6fe-4036-ba67-20a352c4c72a" Jan 22 00:11:29 crc kubenswrapper[4829]: I0122 00:11:29.703141 4829 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="80eb4c06-f6fe-4036-ba67-20a352c4c72a" Jan 22 00:11:29 crc kubenswrapper[4829]: I0122 00:11:29.703785 4829 status_manager.go:851] "Failed to get status for pod" podUID="b6d802df-3827-44ed-96e9-d5013b03aa73" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:29 crc kubenswrapper[4829]: E0122 00:11:29.703835 4829 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 00:11:29 crc kubenswrapper[4829]: I0122 00:11:29.704318 4829 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:29 crc kubenswrapper[4829]: I0122 00:11:29.704826 4829 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:29 crc kubenswrapper[4829]: I0122 00:11:29.708282 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 22 00:11:29 crc kubenswrapper[4829]: I0122 00:11:29.708326 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"df5be984a7ff1430c637afd7ef546401a4509d7ca1cb0e2aab9d5a8274e5768e"} Jan 22 00:11:29 crc kubenswrapper[4829]: I0122 00:11:29.709181 4829 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:29 crc kubenswrapper[4829]: I0122 00:11:29.709494 4829 status_manager.go:851] "Failed to get status for pod" podUID="b6d802df-3827-44ed-96e9-d5013b03aa73" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.129.56.197:6443: connect: 
connection refused" Jan 22 00:11:29 crc kubenswrapper[4829]: I0122 00:11:29.709676 4829 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.129.56.197:6443: connect: connection refused" Jan 22 00:11:30 crc kubenswrapper[4829]: I0122 00:11:30.590893 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:11:30 crc kubenswrapper[4829]: I0122 00:11:30.591315 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:11:30 crc kubenswrapper[4829]: I0122 00:11:30.730079 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"0a50f0ee2ce70ad5892e74a71ef8eafc5cbdb98da1458d60c77f081e576bab78"} Jan 22 00:11:30 crc kubenswrapper[4829]: I0122 00:11:30.730134 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"2d1d57203f0b47dbcd7317ede597378beb864761910669ec4bf6a9ff272190d0"} Jan 22 00:11:30 crc kubenswrapper[4829]: I0122 00:11:30.730148 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"5b695fe3aa2fa2dad747db17ac688ff8b0e8a9894f0b17fdb24bc4c335a22de8"} Jan 22 00:11:30 crc kubenswrapper[4829]: I0122 00:11:30.730161 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"155de7a3213411eec326ceb8afd7d3917f71bd8b78794384cae0ec8c0f2eb0a9"} Jan 22 00:11:31 crc kubenswrapper[4829]: E0122 00:11:31.576677 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-s2dwl], unattached volumes=[], failed to process volumes=[]: context deadline exceeded" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 00:11:31 crc kubenswrapper[4829]: E0122 00:11:31.591693 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Jan 22 00:11:31 crc kubenswrapper[4829]: E0122 00:11:31.591702 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Jan 22 00:11:31 crc kubenswrapper[4829]: I0122 00:11:31.739196 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"9a6d2f731a785245402346f666e6f63f425d5fd4f342364d8e6e3e4642c011ad"} Jan 22 00:11:31 crc kubenswrapper[4829]: I0122 00:11:31.739374 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 00:11:31 crc kubenswrapper[4829]: I0122 00:11:31.739482 4829 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="80eb4c06-f6fe-4036-ba67-20a352c4c72a" Jan 22 00:11:31 crc kubenswrapper[4829]: I0122 00:11:31.739508 4829 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="80eb4c06-f6fe-4036-ba67-20a352c4c72a" Jan 22 00:11:32 crc kubenswrapper[4829]: E0122 00:11:32.577151 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-cqllr], unattached volumes=[], failed to process volumes=[]: context deadline exceeded" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 00:11:32 crc kubenswrapper[4829]: E0122 00:11:32.592402 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: failed to sync configmap cache: timed out waiting for the condition Jan 22 00:11:32 crc kubenswrapper[4829]: E0122 00:11:32.592421 4829 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: failed to sync configmap cache: timed out waiting for the condition Jan 22 00:11:32 crc kubenswrapper[4829]: E0122 00:11:32.592444 4829 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: failed to sync configmap cache: timed out waiting for the condition Jan 22 00:11:32 crc kubenswrapper[4829]: E0122 00:11:32.592453 4829 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: failed to sync configmap cache: timed out waiting for the condition Jan 22 00:11:32 crc kubenswrapper[4829]: E0122 00:11:32.592519 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 00:13:34.592500248 +0000 UTC m=+392.628742160 (durationBeforeRetry 2m2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : failed to sync configmap cache: timed out waiting for the condition Jan 22 00:11:32 crc kubenswrapper[4829]: E0122 00:11:32.592552 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 00:13:34.59253185 +0000 UTC m=+392.628773762 (durationBeforeRetry 2m2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : failed to sync configmap cache: timed out waiting for the condition Jan 22 00:11:33 crc kubenswrapper[4829]: I0122 00:11:33.009773 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 00:11:33 crc kubenswrapper[4829]: I0122 00:11:33.574048 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 00:11:33 crc kubenswrapper[4829]: I0122 00:11:33.574121 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 00:11:33 crc kubenswrapper[4829]: I0122 00:11:33.579263 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 00:11:34 crc kubenswrapper[4829]: I0122 00:11:34.835236 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 00:11:34 crc kubenswrapper[4829]: I0122 00:11:34.839815 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 00:11:36 crc kubenswrapper[4829]: I0122 00:11:36.394252 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 22 00:11:36 crc kubenswrapper[4829]: I0122 00:11:36.594774 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 22 00:11:36 crc kubenswrapper[4829]: I0122 00:11:36.748613 4829 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 00:11:36 crc kubenswrapper[4829]: I0122 00:11:36.769069 4829 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="80eb4c06-f6fe-4036-ba67-20a352c4c72a" Jan 22 00:11:36 crc kubenswrapper[4829]: I0122 00:11:36.769105 4829 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="80eb4c06-f6fe-4036-ba67-20a352c4c72a" Jan 22 00:11:36 crc kubenswrapper[4829]: I0122 00:11:36.774135 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 00:11:36 crc kubenswrapper[4829]: I0122 00:11:36.833482 4829 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="e451dc30-d43b-4628-b336-07135ceb9039" Jan 22 00:11:37 crc kubenswrapper[4829]: I0122 00:11:37.774111 4829 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="80eb4c06-f6fe-4036-ba67-20a352c4c72a" Jan 22 00:11:37 crc kubenswrapper[4829]: I0122 00:11:37.774154 4829 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="80eb4c06-f6fe-4036-ba67-20a352c4c72a" Jan 22 00:11:37 crc kubenswrapper[4829]: I0122 00:11:37.777751 4829 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" 
oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="e451dc30-d43b-4628-b336-07135ceb9039" Jan 22 00:11:43 crc kubenswrapper[4829]: I0122 00:11:43.012961 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 00:11:44 crc kubenswrapper[4829]: I0122 00:11:44.552888 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:11:45 crc kubenswrapper[4829]: I0122 00:11:45.758796 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 22 00:11:45 crc kubenswrapper[4829]: I0122 00:11:45.981722 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 22 00:11:47 crc kubenswrapper[4829]: I0122 00:11:47.553127 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:11:47 crc kubenswrapper[4829]: I0122 00:11:47.754088 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 22 00:11:47 crc kubenswrapper[4829]: I0122 00:11:47.882641 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 22 00:11:47 crc kubenswrapper[4829]: I0122 00:11:47.882961 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 22 00:11:47 crc kubenswrapper[4829]: I0122 00:11:47.890725 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 22 00:11:48 crc kubenswrapper[4829]: I0122 00:11:48.064526 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 22 00:11:48 crc kubenswrapper[4829]: I0122 00:11:48.116459 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 22 00:11:48 crc kubenswrapper[4829]: I0122 00:11:48.121342 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 22 00:11:48 crc kubenswrapper[4829]: I0122 00:11:48.261966 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 22 00:11:48 crc kubenswrapper[4829]: I0122 00:11:48.302267 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 22 00:11:48 crc kubenswrapper[4829]: I0122 00:11:48.393451 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 22 00:11:48 crc kubenswrapper[4829]: I0122 00:11:48.496488 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 22 00:11:48 crc kubenswrapper[4829]: I0122 00:11:48.643558 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 22 00:11:48 crc kubenswrapper[4829]: I0122 00:11:48.653679 4829 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-apiserver"/"trusted-ca-bundle" Jan 22 00:11:48 crc kubenswrapper[4829]: I0122 00:11:48.800047 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 22 00:11:48 crc kubenswrapper[4829]: I0122 00:11:48.854264 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 22 00:11:48 crc kubenswrapper[4829]: I0122 00:11:48.935562 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 22 00:11:49 crc kubenswrapper[4829]: I0122 00:11:49.015288 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 22 00:11:49 crc kubenswrapper[4829]: I0122 00:11:49.089784 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 22 00:11:49 crc kubenswrapper[4829]: I0122 00:11:49.176000 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 22 00:11:49 crc kubenswrapper[4829]: I0122 00:11:49.221486 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 22 00:11:49 crc kubenswrapper[4829]: I0122 00:11:49.277193 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 22 00:11:49 crc kubenswrapper[4829]: I0122 00:11:49.295160 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 22 00:11:49 crc kubenswrapper[4829]: I0122 00:11:49.396950 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 22 00:11:49 crc kubenswrapper[4829]: I0122 00:11:49.649272 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 22 00:11:49 crc kubenswrapper[4829]: I0122 00:11:49.745843 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 22 00:11:49 crc kubenswrapper[4829]: I0122 00:11:49.900134 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 22 00:11:49 crc kubenswrapper[4829]: I0122 00:11:49.912165 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 22 00:11:49 crc kubenswrapper[4829]: I0122 00:11:49.979205 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 22 00:11:49 crc kubenswrapper[4829]: I0122 00:11:49.995813 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 22 00:11:50 crc kubenswrapper[4829]: I0122 00:11:50.009691 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 22 00:11:50 crc kubenswrapper[4829]: I0122 00:11:50.049056 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 22 00:11:50 crc kubenswrapper[4829]: I0122 00:11:50.098942 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 22 
00:11:50 crc kubenswrapper[4829]: I0122 00:11:50.208186 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 22 00:11:50 crc kubenswrapper[4829]: I0122 00:11:50.284988 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 22 00:11:50 crc kubenswrapper[4829]: I0122 00:11:50.331805 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 22 00:11:50 crc kubenswrapper[4829]: I0122 00:11:50.370035 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 22 00:11:50 crc kubenswrapper[4829]: I0122 00:11:50.408383 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 22 00:11:50 crc kubenswrapper[4829]: I0122 00:11:50.411822 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 22 00:11:50 crc kubenswrapper[4829]: I0122 00:11:50.450400 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 22 00:11:50 crc kubenswrapper[4829]: I0122 00:11:50.468704 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 22 00:11:50 crc kubenswrapper[4829]: I0122 00:11:50.513276 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 22 00:11:50 crc kubenswrapper[4829]: I0122 00:11:50.513932 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 22 00:11:50 crc kubenswrapper[4829]: I0122 00:11:50.558198 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 22 00:11:50 crc kubenswrapper[4829]: I0122 00:11:50.635629 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 22 00:11:50 crc kubenswrapper[4829]: I0122 00:11:50.669322 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 22 00:11:50 crc kubenswrapper[4829]: I0122 00:11:50.723484 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 22 00:11:50 crc kubenswrapper[4829]: I0122 00:11:50.775747 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 22 00:11:50 crc kubenswrapper[4829]: I0122 00:11:50.836316 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 22 00:11:50 crc kubenswrapper[4829]: I0122 00:11:50.974698 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 22 00:11:51 crc kubenswrapper[4829]: I0122 00:11:51.182521 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 22 00:11:51 crc kubenswrapper[4829]: I0122 00:11:51.260474 4829 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 22 00:11:51 crc kubenswrapper[4829]: I0122 00:11:51.268499 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 22 00:11:51 crc kubenswrapper[4829]: I0122 00:11:51.281682 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 22 00:11:51 crc kubenswrapper[4829]: I0122 00:11:51.283480 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 22 00:11:51 crc kubenswrapper[4829]: I0122 00:11:51.330049 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 22 00:11:51 crc kubenswrapper[4829]: I0122 00:11:51.457617 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 22 00:11:51 crc kubenswrapper[4829]: I0122 00:11:51.569789 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 22 00:11:51 crc kubenswrapper[4829]: I0122 00:11:51.640128 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 22 00:11:51 crc kubenswrapper[4829]: I0122 00:11:51.752784 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 22 00:11:51 crc kubenswrapper[4829]: I0122 00:11:51.803013 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 22 00:11:51 crc kubenswrapper[4829]: I0122 00:11:51.922125 4829 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 22 00:11:51 crc kubenswrapper[4829]: I0122 00:11:51.932247 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 22 00:11:51 crc kubenswrapper[4829]: I0122 00:11:51.946400 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 22 00:11:51 crc kubenswrapper[4829]: I0122 00:11:51.991719 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 22 00:11:52 crc kubenswrapper[4829]: I0122 00:11:52.001104 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 22 00:11:52 crc kubenswrapper[4829]: I0122 00:11:52.229964 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 22 00:11:52 crc kubenswrapper[4829]: I0122 00:11:52.245299 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 22 00:11:52 crc kubenswrapper[4829]: I0122 00:11:52.275446 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 22 00:11:52 crc kubenswrapper[4829]: I0122 00:11:52.338671 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 22 00:11:52 crc kubenswrapper[4829]: I0122 00:11:52.435653 4829 reflector.go:368] Caches populated 
for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 22 00:11:52 crc kubenswrapper[4829]: I0122 00:11:52.458752 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 22 00:11:52 crc kubenswrapper[4829]: I0122 00:11:52.458891 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 22 00:11:52 crc kubenswrapper[4829]: I0122 00:11:52.560991 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 22 00:11:52 crc kubenswrapper[4829]: I0122 00:11:52.709334 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 22 00:11:52 crc kubenswrapper[4829]: I0122 00:11:52.720691 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 22 00:11:52 crc kubenswrapper[4829]: I0122 00:11:52.727108 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 22 00:11:52 crc kubenswrapper[4829]: I0122 00:11:52.808228 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 22 00:11:52 crc kubenswrapper[4829]: I0122 00:11:52.820894 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 22 00:11:52 crc kubenswrapper[4829]: I0122 00:11:52.851636 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 22 00:11:52 crc kubenswrapper[4829]: I0122 00:11:52.905073 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 22 00:11:53 crc kubenswrapper[4829]: I0122 00:11:53.001009 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 22 00:11:53 crc kubenswrapper[4829]: I0122 00:11:53.007037 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 22 00:11:53 crc kubenswrapper[4829]: I0122 00:11:53.075570 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 22 00:11:53 crc kubenswrapper[4829]: I0122 00:11:53.123976 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 22 00:11:53 crc kubenswrapper[4829]: I0122 00:11:53.176241 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 22 00:11:53 crc kubenswrapper[4829]: I0122 00:11:53.214199 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 22 00:11:53 crc kubenswrapper[4829]: I0122 00:11:53.259926 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 22 00:11:53 crc kubenswrapper[4829]: I0122 00:11:53.273981 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 22 00:11:53 crc kubenswrapper[4829]: I0122 00:11:53.326120 4829 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 22 00:11:53 crc kubenswrapper[4829]: I0122 00:11:53.370576 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 22 00:11:53 crc kubenswrapper[4829]: I0122 00:11:53.370579 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 22 00:11:53 crc kubenswrapper[4829]: I0122 00:11:53.512586 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 22 00:11:53 crc kubenswrapper[4829]: I0122 00:11:53.613846 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 22 00:11:53 crc kubenswrapper[4829]: I0122 00:11:53.613947 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 22 00:11:53 crc kubenswrapper[4829]: I0122 00:11:53.712675 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 22 00:11:53 crc kubenswrapper[4829]: I0122 00:11:53.740066 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 22 00:11:53 crc kubenswrapper[4829]: I0122 00:11:53.754419 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 22 00:11:53 crc kubenswrapper[4829]: I0122 00:11:53.874456 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 22 00:11:53 crc kubenswrapper[4829]: I0122 00:11:53.885880 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 22 00:11:54 crc kubenswrapper[4829]: I0122 00:11:54.008911 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 22 00:11:54 crc kubenswrapper[4829]: I0122 00:11:54.011257 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 22 00:11:54 crc kubenswrapper[4829]: I0122 00:11:54.170086 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 22 00:11:54 crc kubenswrapper[4829]: I0122 00:11:54.255727 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 22 00:11:54 crc kubenswrapper[4829]: I0122 00:11:54.257697 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 22 00:11:54 crc kubenswrapper[4829]: I0122 00:11:54.312218 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 22 00:11:54 crc kubenswrapper[4829]: I0122 00:11:54.333489 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 22 00:11:54 crc kubenswrapper[4829]: I0122 00:11:54.353448 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 22 00:11:54 crc kubenswrapper[4829]: I0122 00:11:54.429599 4829 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 22 00:11:54 crc kubenswrapper[4829]: I0122 00:11:54.492668 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 22 00:11:54 crc kubenswrapper[4829]: I0122 00:11:54.517597 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 22 00:11:54 crc kubenswrapper[4829]: I0122 00:11:54.532073 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 22 00:11:54 crc kubenswrapper[4829]: I0122 00:11:54.576428 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 22 00:11:54 crc kubenswrapper[4829]: I0122 00:11:54.593399 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 22 00:11:54 crc kubenswrapper[4829]: I0122 00:11:54.598512 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 22 00:11:54 crc kubenswrapper[4829]: I0122 00:11:54.680556 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 22 00:11:54 crc kubenswrapper[4829]: I0122 00:11:54.719945 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 22 00:11:54 crc kubenswrapper[4829]: I0122 00:11:54.749613 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 22 00:11:54 crc kubenswrapper[4829]: I0122 00:11:54.800221 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 22 00:11:54 crc kubenswrapper[4829]: I0122 00:11:54.885055 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 22 00:11:55 crc kubenswrapper[4829]: I0122 00:11:55.032402 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 22 00:11:55 crc kubenswrapper[4829]: I0122 00:11:55.103797 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 22 00:11:55 crc kubenswrapper[4829]: I0122 00:11:55.234746 4829 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 22 00:11:55 crc kubenswrapper[4829]: I0122 00:11:55.318724 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 22 00:11:55 crc kubenswrapper[4829]: I0122 00:11:55.339675 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 22 00:11:55 crc kubenswrapper[4829]: I0122 00:11:55.341006 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 22 00:11:55 crc kubenswrapper[4829]: I0122 00:11:55.354008 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 22 00:11:55 crc kubenswrapper[4829]: I0122 00:11:55.513816 4829 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 22 00:11:55 crc kubenswrapper[4829]: I0122 00:11:55.515744 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 22 00:11:55 crc kubenswrapper[4829]: I0122 00:11:55.529642 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 22 00:11:55 crc kubenswrapper[4829]: I0122 00:11:55.674598 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 22 00:11:55 crc kubenswrapper[4829]: I0122 00:11:55.688030 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 22 00:11:55 crc kubenswrapper[4829]: I0122 00:11:55.731301 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 22 00:11:55 crc kubenswrapper[4829]: I0122 00:11:55.733062 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 22 00:11:55 crc kubenswrapper[4829]: I0122 00:11:55.770249 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 22 00:11:55 crc kubenswrapper[4829]: I0122 00:11:55.771327 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 22 00:11:55 crc kubenswrapper[4829]: I0122 00:11:55.857168 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 22 00:11:55 crc kubenswrapper[4829]: I0122 00:11:55.949748 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 22 00:11:55 crc kubenswrapper[4829]: I0122 00:11:55.986279 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 22 00:11:55 crc kubenswrapper[4829]: I0122 00:11:55.987393 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.006859 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.022431 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.039888 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.041825 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.043085 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.120002 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.135679 4829 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.144873 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.205807 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.241574 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.333776 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.371907 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.390869 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.395502 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.409940 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.434748 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.445687 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.588061 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.621578 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.638698 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.764340 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.764572 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.769476 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.816227 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.898299 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 
00:11:56.947428 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.956688 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.974615 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 22 00:11:56 crc kubenswrapper[4829]: I0122 00:11:56.976385 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 22 00:11:57 crc kubenswrapper[4829]: I0122 00:11:57.017590 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 22 00:11:57 crc kubenswrapper[4829]: I0122 00:11:57.072768 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 22 00:11:57 crc kubenswrapper[4829]: I0122 00:11:57.089801 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 22 00:11:57 crc kubenswrapper[4829]: I0122 00:11:57.092761 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 22 00:11:57 crc kubenswrapper[4829]: I0122 00:11:57.277169 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 22 00:11:57 crc kubenswrapper[4829]: I0122 00:11:57.294880 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 22 00:11:57 crc kubenswrapper[4829]: I0122 00:11:57.349427 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 22 00:11:57 crc kubenswrapper[4829]: I0122 00:11:57.380269 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 22 00:11:57 crc kubenswrapper[4829]: I0122 00:11:57.409144 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 22 00:11:57 crc kubenswrapper[4829]: I0122 00:11:57.425205 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 22 00:11:57 crc kubenswrapper[4829]: I0122 00:11:57.520154 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 22 00:11:57 crc kubenswrapper[4829]: I0122 00:11:57.544752 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 22 00:11:57 crc kubenswrapper[4829]: I0122 00:11:57.575044 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 22 00:11:57 crc kubenswrapper[4829]: I0122 00:11:57.617002 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 22 00:11:57 crc kubenswrapper[4829]: I0122 00:11:57.625206 4829 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 22 00:11:57 crc kubenswrapper[4829]: I0122 00:11:57.628339 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 22 00:11:57 crc kubenswrapper[4829]: I0122 00:11:57.698438 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 22 00:11:57 crc kubenswrapper[4829]: I0122 00:11:57.953751 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 22 00:11:58 crc kubenswrapper[4829]: I0122 00:11:58.004456 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 22 00:11:58 crc kubenswrapper[4829]: I0122 00:11:58.011327 4829 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 22 00:11:58 crc kubenswrapper[4829]: I0122 00:11:58.091322 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 22 00:11:58 crc kubenswrapper[4829]: I0122 00:11:58.096827 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 22 00:11:58 crc kubenswrapper[4829]: I0122 00:11:58.133650 4829 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 22 00:11:58 crc kubenswrapper[4829]: I0122 00:11:58.136470 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=43.136454834 podStartE2EDuration="43.136454834s" podCreationTimestamp="2026-01-22 00:11:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:11:36.786030672 +0000 UTC m=+274.822272584" watchObservedRunningTime="2026-01-22 00:11:58.136454834 +0000 UTC m=+296.172696766" Jan 22 00:11:58 crc kubenswrapper[4829]: I0122 00:11:58.140687 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 22 00:11:58 crc kubenswrapper[4829]: I0122 00:11:58.140752 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 22 00:11:58 crc kubenswrapper[4829]: I0122 00:11:58.145738 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 00:11:58 crc kubenswrapper[4829]: I0122 00:11:58.160882 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=22.160862394 podStartE2EDuration="22.160862394s" podCreationTimestamp="2026-01-22 00:11:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:11:58.157368872 +0000 UTC m=+296.193610824" watchObservedRunningTime="2026-01-22 00:11:58.160862394 +0000 UTC m=+296.197104306" Jan 22 00:11:58 crc kubenswrapper[4829]: I0122 00:11:58.292379 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 22 00:11:58 crc kubenswrapper[4829]: I0122 00:11:58.308137 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 22 00:11:58 crc 
kubenswrapper[4829]: I0122 00:11:58.444110 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 22 00:11:58 crc kubenswrapper[4829]: I0122 00:11:58.643376 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 22 00:11:58 crc kubenswrapper[4829]: I0122 00:11:58.824878 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 22 00:11:58 crc kubenswrapper[4829]: I0122 00:11:58.865109 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 22 00:11:58 crc kubenswrapper[4829]: I0122 00:11:58.883041 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 22 00:11:59 crc kubenswrapper[4829]: I0122 00:11:59.065833 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 22 00:11:59 crc kubenswrapper[4829]: I0122 00:11:59.155564 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 22 00:11:59 crc kubenswrapper[4829]: I0122 00:11:59.169351 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 22 00:11:59 crc kubenswrapper[4829]: I0122 00:11:59.187643 4829 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 22 00:11:59 crc kubenswrapper[4829]: I0122 00:11:59.187911 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://c3cc427231d6ae9c37d13aef8c6e43c473fe7a540b4ca41c10a0883b5095fccb" gracePeriod=5 Jan 22 00:11:59 crc kubenswrapper[4829]: I0122 00:11:59.254499 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 22 00:11:59 crc kubenswrapper[4829]: I0122 00:11:59.297758 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 22 00:11:59 crc kubenswrapper[4829]: I0122 00:11:59.417099 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 22 00:11:59 crc kubenswrapper[4829]: I0122 00:11:59.444524 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 22 00:11:59 crc kubenswrapper[4829]: I0122 00:11:59.468861 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 22 00:11:59 crc kubenswrapper[4829]: I0122 00:11:59.502213 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 22 00:11:59 crc kubenswrapper[4829]: I0122 00:11:59.503460 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 22 00:11:59 crc kubenswrapper[4829]: I0122 00:11:59.601217 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 22 00:11:59 crc kubenswrapper[4829]: 
I0122 00:11:59.650255 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 22 00:11:59 crc kubenswrapper[4829]: I0122 00:11:59.695421 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 22 00:11:59 crc kubenswrapper[4829]: I0122 00:11:59.719771 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 22 00:11:59 crc kubenswrapper[4829]: I0122 00:11:59.802567 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 22 00:11:59 crc kubenswrapper[4829]: I0122 00:11:59.976562 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 22 00:12:00 crc kubenswrapper[4829]: I0122 00:12:00.003889 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 22 00:12:00 crc kubenswrapper[4829]: I0122 00:12:00.253691 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 22 00:12:00 crc kubenswrapper[4829]: I0122 00:12:00.358149 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 22 00:12:00 crc kubenswrapper[4829]: I0122 00:12:00.383117 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 22 00:12:00 crc kubenswrapper[4829]: I0122 00:12:00.405516 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 22 00:12:00 crc kubenswrapper[4829]: I0122 00:12:00.466945 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 22 00:12:00 crc kubenswrapper[4829]: I0122 00:12:00.498599 4829 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 22 00:12:00 crc kubenswrapper[4829]: I0122 00:12:00.499992 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 22 00:12:00 crc kubenswrapper[4829]: I0122 00:12:00.542056 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 22 00:12:00 crc kubenswrapper[4829]: I0122 00:12:00.645446 4829 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 22 00:12:00 crc kubenswrapper[4829]: I0122 00:12:00.712143 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 22 00:12:00 crc kubenswrapper[4829]: I0122 00:12:00.815343 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 22 00:12:00 crc kubenswrapper[4829]: I0122 00:12:00.824352 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 22 00:12:00 crc kubenswrapper[4829]: I0122 00:12:00.999598 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 22 00:12:01 crc 
kubenswrapper[4829]: I0122 00:12:01.027060 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 22 00:12:01 crc kubenswrapper[4829]: I0122 00:12:01.217174 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 22 00:12:01 crc kubenswrapper[4829]: I0122 00:12:01.257683 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 22 00:12:01 crc kubenswrapper[4829]: I0122 00:12:01.290170 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 22 00:12:01 crc kubenswrapper[4829]: I0122 00:12:01.349620 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 22 00:12:01 crc kubenswrapper[4829]: I0122 00:12:01.467446 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 22 00:12:01 crc kubenswrapper[4829]: I0122 00:12:01.475555 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 22 00:12:01 crc kubenswrapper[4829]: I0122 00:12:01.753221 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 22 00:12:01 crc kubenswrapper[4829]: I0122 00:12:01.959190 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 22 00:12:02 crc kubenswrapper[4829]: I0122 00:12:02.141170 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 22 00:12:02 crc kubenswrapper[4829]: I0122 00:12:02.154843 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 22 00:12:02 crc kubenswrapper[4829]: I0122 00:12:02.220037 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 22 00:12:02 crc kubenswrapper[4829]: I0122 00:12:02.283554 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 22 00:12:02 crc kubenswrapper[4829]: I0122 00:12:02.357087 4829 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Jan 22 00:12:02 crc kubenswrapper[4829]: I0122 00:12:02.361990 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 22 00:12:02 crc kubenswrapper[4829]: I0122 00:12:02.479309 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 22 00:12:02 crc kubenswrapper[4829]: I0122 00:12:02.594953 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 22 00:12:02 crc kubenswrapper[4829]: I0122 00:12:02.598709 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 22 00:12:03 crc kubenswrapper[4829]: I0122 00:12:03.267445 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 22 00:12:03 
crc kubenswrapper[4829]: I0122 00:12:03.630622 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 22 00:12:04 crc kubenswrapper[4829]: I0122 00:12:04.423238 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 22 00:12:05 crc kubenswrapper[4829]: I0122 00:12:05.400051 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 22 00:12:05 crc kubenswrapper[4829]: I0122 00:12:05.400123 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 00:12:05 crc kubenswrapper[4829]: I0122 00:12:05.446015 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 22 00:12:05 crc kubenswrapper[4829]: I0122 00:12:05.446453 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 22 00:12:05 crc kubenswrapper[4829]: I0122 00:12:05.446497 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 22 00:12:05 crc kubenswrapper[4829]: I0122 00:12:05.446634 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 22 00:12:05 crc kubenswrapper[4829]: I0122 00:12:05.446692 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 22 00:12:05 crc kubenswrapper[4829]: I0122 00:12:05.446339 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:12:05 crc kubenswrapper[4829]: I0122 00:12:05.447306 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:12:05 crc kubenswrapper[4829]: I0122 00:12:05.447171 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:12:05 crc kubenswrapper[4829]: I0122 00:12:05.447266 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:12:05 crc kubenswrapper[4829]: I0122 00:12:05.455892 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:12:05 crc kubenswrapper[4829]: I0122 00:12:05.548448 4829 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:05 crc kubenswrapper[4829]: I0122 00:12:05.548492 4829 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:05 crc kubenswrapper[4829]: I0122 00:12:05.548503 4829 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:05 crc kubenswrapper[4829]: I0122 00:12:05.548516 4829 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:05 crc kubenswrapper[4829]: I0122 00:12:05.548529 4829 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:05 crc kubenswrapper[4829]: I0122 00:12:05.972530 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 22 00:12:05 crc kubenswrapper[4829]: I0122 00:12:05.972588 4829 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="c3cc427231d6ae9c37d13aef8c6e43c473fe7a540b4ca41c10a0883b5095fccb" exitCode=137 Jan 22 00:12:05 crc kubenswrapper[4829]: I0122 00:12:05.972645 4829 scope.go:117] "RemoveContainer" containerID="c3cc427231d6ae9c37d13aef8c6e43c473fe7a540b4ca41c10a0883b5095fccb" Jan 22 00:12:05 crc kubenswrapper[4829]: I0122 00:12:05.972699 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 00:12:05 crc kubenswrapper[4829]: I0122 00:12:05.986914 4829 scope.go:117] "RemoveContainer" containerID="c3cc427231d6ae9c37d13aef8c6e43c473fe7a540b4ca41c10a0883b5095fccb" Jan 22 00:12:05 crc kubenswrapper[4829]: E0122 00:12:05.987220 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3cc427231d6ae9c37d13aef8c6e43c473fe7a540b4ca41c10a0883b5095fccb\": container with ID starting with c3cc427231d6ae9c37d13aef8c6e43c473fe7a540b4ca41c10a0883b5095fccb not found: ID does not exist" containerID="c3cc427231d6ae9c37d13aef8c6e43c473fe7a540b4ca41c10a0883b5095fccb" Jan 22 00:12:05 crc kubenswrapper[4829]: I0122 00:12:05.987250 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3cc427231d6ae9c37d13aef8c6e43c473fe7a540b4ca41c10a0883b5095fccb"} err="failed to get container status \"c3cc427231d6ae9c37d13aef8c6e43c473fe7a540b4ca41c10a0883b5095fccb\": rpc error: code = NotFound desc = could not find container \"c3cc427231d6ae9c37d13aef8c6e43c473fe7a540b4ca41c10a0883b5095fccb\": container with ID starting with c3cc427231d6ae9c37d13aef8c6e43c473fe7a540b4ca41c10a0883b5095fccb not found: ID does not exist" Jan 22 00:12:06 crc kubenswrapper[4829]: I0122 00:12:06.561776 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Jan 22 00:12:06 crc kubenswrapper[4829]: I0122 00:12:06.562590 4829 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Jan 22 00:12:06 crc kubenswrapper[4829]: I0122 00:12:06.572656 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 22 00:12:06 crc kubenswrapper[4829]: I0122 00:12:06.572704 4829 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="a52b442e-adb8-4db8-a79c-010dc601fac2" Jan 22 00:12:06 crc kubenswrapper[4829]: I0122 00:12:06.576702 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 22 00:12:06 crc kubenswrapper[4829]: I0122 00:12:06.576738 4829 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="a52b442e-adb8-4db8-a79c-010dc601fac2" Jan 22 00:12:14 crc kubenswrapper[4829]: I0122 00:12:14.521478 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hp7g2"] Jan 22 00:12:14 crc kubenswrapper[4829]: I0122 00:12:14.522271 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hp7g2" podUID="4802cff4-7d5d-4af4-9ae0-7816748f46b3" containerName="registry-server" containerID="cri-o://a53a2a77947444deb73d3c0b07e3220b8da23f3051b4b81ae631065af994b89c" gracePeriod=30 Jan 22 00:12:14 crc kubenswrapper[4829]: I0122 00:12:14.525336 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tw5xj"] Jan 22 00:12:14 crc kubenswrapper[4829]: I0122 00:12:14.525620 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-tw5xj" 
podUID="622d8532-4cca-4f15-972e-373735e2a5c1" containerName="registry-server" containerID="cri-o://ac7aeb14f4b7015d809edb9e5bfa876a6ab29ca5609b2c4d24c2ace9878b54cf" gracePeriod=30 Jan 22 00:12:14 crc kubenswrapper[4829]: I0122 00:12:14.529242 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-w7dnn"] Jan 22 00:12:14 crc kubenswrapper[4829]: I0122 00:12:14.530725 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-w7dnn" podUID="2dcde760-e466-49aa-a092-1385876196ef" containerName="marketplace-operator" containerID="cri-o://9375d371ad43f857534c32794260bdb25ee1236731ac7890ac1f7f40c3adacb4" gracePeriod=30 Jan 22 00:12:14 crc kubenswrapper[4829]: I0122 00:12:14.537665 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bnqwm"] Jan 22 00:12:14 crc kubenswrapper[4829]: I0122 00:12:14.537976 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-bnqwm" podUID="76530c0d-9597-4099-b9c3-f375bd12b26c" containerName="registry-server" containerID="cri-o://9336e3dd3ba874f45733d4b6432036581d038d10ce4342db354d0d771e0dc5d8" gracePeriod=30 Jan 22 00:12:14 crc kubenswrapper[4829]: I0122 00:12:14.547045 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6pznq"] Jan 22 00:12:14 crc kubenswrapper[4829]: I0122 00:12:14.547659 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6pznq" podUID="08ce8375-3a53-43e6-a7c1-3ce32698965d" containerName="registry-server" containerID="cri-o://17cef235973365c3cc1b0035da5eb414344c8f89bce8f0811b27638a88c9bba0" gracePeriod=30 Jan 22 00:12:14 crc kubenswrapper[4829]: I0122 00:12:14.567457 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-k5z2d"] Jan 22 00:12:14 crc kubenswrapper[4829]: E0122 00:12:14.567687 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6d802df-3827-44ed-96e9-d5013b03aa73" containerName="installer" Jan 22 00:12:14 crc kubenswrapper[4829]: I0122 00:12:14.567700 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6d802df-3827-44ed-96e9-d5013b03aa73" containerName="installer" Jan 22 00:12:14 crc kubenswrapper[4829]: E0122 00:12:14.567709 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 22 00:12:14 crc kubenswrapper[4829]: I0122 00:12:14.567716 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 22 00:12:14 crc kubenswrapper[4829]: I0122 00:12:14.567809 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 22 00:12:14 crc kubenswrapper[4829]: I0122 00:12:14.567819 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6d802df-3827-44ed-96e9-d5013b03aa73" containerName="installer" Jan 22 00:12:14 crc kubenswrapper[4829]: I0122 00:12:14.568178 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-k5z2d" Jan 22 00:12:14 crc kubenswrapper[4829]: I0122 00:12:14.637015 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-k5z2d"] Jan 22 00:12:14 crc kubenswrapper[4829]: I0122 00:12:14.721289 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/61cad9e2-128b-4059-b8cb-7b024ecb5ce3-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-k5z2d\" (UID: \"61cad9e2-128b-4059-b8cb-7b024ecb5ce3\") " pod="openshift-marketplace/marketplace-operator-79b997595-k5z2d" Jan 22 00:12:14 crc kubenswrapper[4829]: I0122 00:12:14.721501 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8kcl\" (UniqueName: \"kubernetes.io/projected/61cad9e2-128b-4059-b8cb-7b024ecb5ce3-kube-api-access-p8kcl\") pod \"marketplace-operator-79b997595-k5z2d\" (UID: \"61cad9e2-128b-4059-b8cb-7b024ecb5ce3\") " pod="openshift-marketplace/marketplace-operator-79b997595-k5z2d" Jan 22 00:12:14 crc kubenswrapper[4829]: I0122 00:12:14.721647 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/61cad9e2-128b-4059-b8cb-7b024ecb5ce3-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-k5z2d\" (UID: \"61cad9e2-128b-4059-b8cb-7b024ecb5ce3\") " pod="openshift-marketplace/marketplace-operator-79b997595-k5z2d" Jan 22 00:12:14 crc kubenswrapper[4829]: I0122 00:12:14.822950 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/61cad9e2-128b-4059-b8cb-7b024ecb5ce3-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-k5z2d\" (UID: \"61cad9e2-128b-4059-b8cb-7b024ecb5ce3\") " pod="openshift-marketplace/marketplace-operator-79b997595-k5z2d" Jan 22 00:12:14 crc kubenswrapper[4829]: I0122 00:12:14.823047 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/61cad9e2-128b-4059-b8cb-7b024ecb5ce3-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-k5z2d\" (UID: \"61cad9e2-128b-4059-b8cb-7b024ecb5ce3\") " pod="openshift-marketplace/marketplace-operator-79b997595-k5z2d" Jan 22 00:12:14 crc kubenswrapper[4829]: I0122 00:12:14.823082 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8kcl\" (UniqueName: \"kubernetes.io/projected/61cad9e2-128b-4059-b8cb-7b024ecb5ce3-kube-api-access-p8kcl\") pod \"marketplace-operator-79b997595-k5z2d\" (UID: \"61cad9e2-128b-4059-b8cb-7b024ecb5ce3\") " pod="openshift-marketplace/marketplace-operator-79b997595-k5z2d" Jan 22 00:12:14 crc kubenswrapper[4829]: I0122 00:12:14.824958 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/61cad9e2-128b-4059-b8cb-7b024ecb5ce3-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-k5z2d\" (UID: \"61cad9e2-128b-4059-b8cb-7b024ecb5ce3\") " pod="openshift-marketplace/marketplace-operator-79b997595-k5z2d" Jan 22 00:12:14 crc kubenswrapper[4829]: I0122 00:12:14.835586 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/61cad9e2-128b-4059-b8cb-7b024ecb5ce3-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-k5z2d\" (UID: \"61cad9e2-128b-4059-b8cb-7b024ecb5ce3\") " pod="openshift-marketplace/marketplace-operator-79b997595-k5z2d" Jan 22 00:12:14 crc kubenswrapper[4829]: I0122 00:12:14.837240 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8kcl\" (UniqueName: \"kubernetes.io/projected/61cad9e2-128b-4059-b8cb-7b024ecb5ce3-kube-api-access-p8kcl\") pod \"marketplace-operator-79b997595-k5z2d\" (UID: \"61cad9e2-128b-4059-b8cb-7b024ecb5ce3\") " pod="openshift-marketplace/marketplace-operator-79b997595-k5z2d" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.023911 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-k5z2d" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.030170 4829 generic.go:334] "Generic (PLEG): container finished" podID="76530c0d-9597-4099-b9c3-f375bd12b26c" containerID="9336e3dd3ba874f45733d4b6432036581d038d10ce4342db354d0d771e0dc5d8" exitCode=0 Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.030317 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bnqwm" event={"ID":"76530c0d-9597-4099-b9c3-f375bd12b26c","Type":"ContainerDied","Data":"9336e3dd3ba874f45733d4b6432036581d038d10ce4342db354d0d771e0dc5d8"} Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.038631 4829 generic.go:334] "Generic (PLEG): container finished" podID="622d8532-4cca-4f15-972e-373735e2a5c1" containerID="ac7aeb14f4b7015d809edb9e5bfa876a6ab29ca5609b2c4d24c2ace9878b54cf" exitCode=0 Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.038820 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tw5xj" event={"ID":"622d8532-4cca-4f15-972e-373735e2a5c1","Type":"ContainerDied","Data":"ac7aeb14f4b7015d809edb9e5bfa876a6ab29ca5609b2c4d24c2ace9878b54cf"} Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.045375 4829 generic.go:334] "Generic (PLEG): container finished" podID="4802cff4-7d5d-4af4-9ae0-7816748f46b3" containerID="a53a2a77947444deb73d3c0b07e3220b8da23f3051b4b81ae631065af994b89c" exitCode=0 Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.045417 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hp7g2" event={"ID":"4802cff4-7d5d-4af4-9ae0-7816748f46b3","Type":"ContainerDied","Data":"a53a2a77947444deb73d3c0b07e3220b8da23f3051b4b81ae631065af994b89c"} Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.049789 4829 generic.go:334] "Generic (PLEG): container finished" podID="08ce8375-3a53-43e6-a7c1-3ce32698965d" containerID="17cef235973365c3cc1b0035da5eb414344c8f89bce8f0811b27638a88c9bba0" exitCode=0 Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.049987 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6pznq" event={"ID":"08ce8375-3a53-43e6-a7c1-3ce32698965d","Type":"ContainerDied","Data":"17cef235973365c3cc1b0035da5eb414344c8f89bce8f0811b27638a88c9bba0"} Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.052899 4829 generic.go:334] "Generic (PLEG): container finished" podID="2dcde760-e466-49aa-a092-1385876196ef" containerID="9375d371ad43f857534c32794260bdb25ee1236731ac7890ac1f7f40c3adacb4" exitCode=0 Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.052936 4829 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-marketplace/marketplace-operator-79b997595-w7dnn" event={"ID":"2dcde760-e466-49aa-a092-1385876196ef","Type":"ContainerDied","Data":"9375d371ad43f857534c32794260bdb25ee1236731ac7890ac1f7f40c3adacb4"} Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.164199 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hp7g2" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.221470 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tw5xj" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.229284 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4802cff4-7d5d-4af4-9ae0-7816748f46b3-utilities\") pod \"4802cff4-7d5d-4af4-9ae0-7816748f46b3\" (UID: \"4802cff4-7d5d-4af4-9ae0-7816748f46b3\") " Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.229416 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dm4pv\" (UniqueName: \"kubernetes.io/projected/4802cff4-7d5d-4af4-9ae0-7816748f46b3-kube-api-access-dm4pv\") pod \"4802cff4-7d5d-4af4-9ae0-7816748f46b3\" (UID: \"4802cff4-7d5d-4af4-9ae0-7816748f46b3\") " Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.229471 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4802cff4-7d5d-4af4-9ae0-7816748f46b3-catalog-content\") pod \"4802cff4-7d5d-4af4-9ae0-7816748f46b3\" (UID: \"4802cff4-7d5d-4af4-9ae0-7816748f46b3\") " Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.233209 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4802cff4-7d5d-4af4-9ae0-7816748f46b3-utilities" (OuterVolumeSpecName: "utilities") pod "4802cff4-7d5d-4af4-9ae0-7816748f46b3" (UID: "4802cff4-7d5d-4af4-9ae0-7816748f46b3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.234293 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4802cff4-7d5d-4af4-9ae0-7816748f46b3-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.236989 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-w7dnn" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.253356 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4802cff4-7d5d-4af4-9ae0-7816748f46b3-kube-api-access-dm4pv" (OuterVolumeSpecName: "kube-api-access-dm4pv") pod "4802cff4-7d5d-4af4-9ae0-7816748f46b3" (UID: "4802cff4-7d5d-4af4-9ae0-7816748f46b3"). InnerVolumeSpecName "kube-api-access-dm4pv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.294680 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4802cff4-7d5d-4af4-9ae0-7816748f46b3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4802cff4-7d5d-4af4-9ae0-7816748f46b3" (UID: "4802cff4-7d5d-4af4-9ae0-7816748f46b3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.335740 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xq6nc\" (UniqueName: \"kubernetes.io/projected/622d8532-4cca-4f15-972e-373735e2a5c1-kube-api-access-xq6nc\") pod \"622d8532-4cca-4f15-972e-373735e2a5c1\" (UID: \"622d8532-4cca-4f15-972e-373735e2a5c1\") " Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.335819 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/622d8532-4cca-4f15-972e-373735e2a5c1-utilities\") pod \"622d8532-4cca-4f15-972e-373735e2a5c1\" (UID: \"622d8532-4cca-4f15-972e-373735e2a5c1\") " Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.335880 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfhw9\" (UniqueName: \"kubernetes.io/projected/2dcde760-e466-49aa-a092-1385876196ef-kube-api-access-tfhw9\") pod \"2dcde760-e466-49aa-a092-1385876196ef\" (UID: \"2dcde760-e466-49aa-a092-1385876196ef\") " Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.335940 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2dcde760-e466-49aa-a092-1385876196ef-marketplace-trusted-ca\") pod \"2dcde760-e466-49aa-a092-1385876196ef\" (UID: \"2dcde760-e466-49aa-a092-1385876196ef\") " Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.335968 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2dcde760-e466-49aa-a092-1385876196ef-marketplace-operator-metrics\") pod \"2dcde760-e466-49aa-a092-1385876196ef\" (UID: \"2dcde760-e466-49aa-a092-1385876196ef\") " Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.336003 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/622d8532-4cca-4f15-972e-373735e2a5c1-catalog-content\") pod \"622d8532-4cca-4f15-972e-373735e2a5c1\" (UID: \"622d8532-4cca-4f15-972e-373735e2a5c1\") " Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.336221 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4802cff4-7d5d-4af4-9ae0-7816748f46b3-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.336232 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dm4pv\" (UniqueName: \"kubernetes.io/projected/4802cff4-7d5d-4af4-9ae0-7816748f46b3-kube-api-access-dm4pv\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.336793 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2dcde760-e466-49aa-a092-1385876196ef-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "2dcde760-e466-49aa-a092-1385876196ef" (UID: "2dcde760-e466-49aa-a092-1385876196ef"). InnerVolumeSpecName "marketplace-trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.336988 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/622d8532-4cca-4f15-972e-373735e2a5c1-utilities" (OuterVolumeSpecName: "utilities") pod "622d8532-4cca-4f15-972e-373735e2a5c1" (UID: "622d8532-4cca-4f15-972e-373735e2a5c1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.342488 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/622d8532-4cca-4f15-972e-373735e2a5c1-kube-api-access-xq6nc" (OuterVolumeSpecName: "kube-api-access-xq6nc") pod "622d8532-4cca-4f15-972e-373735e2a5c1" (UID: "622d8532-4cca-4f15-972e-373735e2a5c1"). InnerVolumeSpecName "kube-api-access-xq6nc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.342721 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2dcde760-e466-49aa-a092-1385876196ef-kube-api-access-tfhw9" (OuterVolumeSpecName: "kube-api-access-tfhw9") pod "2dcde760-e466-49aa-a092-1385876196ef" (UID: "2dcde760-e466-49aa-a092-1385876196ef"). InnerVolumeSpecName "kube-api-access-tfhw9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.343366 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2dcde760-e466-49aa-a092-1385876196ef-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "2dcde760-e466-49aa-a092-1385876196ef" (UID: "2dcde760-e466-49aa-a092-1385876196ef"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.400211 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/622d8532-4cca-4f15-972e-373735e2a5c1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "622d8532-4cca-4f15-972e-373735e2a5c1" (UID: "622d8532-4cca-4f15-972e-373735e2a5c1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.437864 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xq6nc\" (UniqueName: \"kubernetes.io/projected/622d8532-4cca-4f15-972e-373735e2a5c1-kube-api-access-xq6nc\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.437900 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/622d8532-4cca-4f15-972e-373735e2a5c1-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.437912 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfhw9\" (UniqueName: \"kubernetes.io/projected/2dcde760-e466-49aa-a092-1385876196ef-kube-api-access-tfhw9\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.437973 4829 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2dcde760-e466-49aa-a092-1385876196ef-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.437986 4829 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/2dcde760-e466-49aa-a092-1385876196ef-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.437997 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/622d8532-4cca-4f15-972e-373735e2a5c1-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.499651 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-k5z2d"] Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.649153 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bnqwm" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.742902 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6pznq" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.747344 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fhrsr\" (UniqueName: \"kubernetes.io/projected/76530c0d-9597-4099-b9c3-f375bd12b26c-kube-api-access-fhrsr\") pod \"76530c0d-9597-4099-b9c3-f375bd12b26c\" (UID: \"76530c0d-9597-4099-b9c3-f375bd12b26c\") " Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.747490 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76530c0d-9597-4099-b9c3-f375bd12b26c-catalog-content\") pod \"76530c0d-9597-4099-b9c3-f375bd12b26c\" (UID: \"76530c0d-9597-4099-b9c3-f375bd12b26c\") " Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.747579 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76530c0d-9597-4099-b9c3-f375bd12b26c-utilities\") pod \"76530c0d-9597-4099-b9c3-f375bd12b26c\" (UID: \"76530c0d-9597-4099-b9c3-f375bd12b26c\") " Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.748653 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76530c0d-9597-4099-b9c3-f375bd12b26c-utilities" (OuterVolumeSpecName: "utilities") pod "76530c0d-9597-4099-b9c3-f375bd12b26c" (UID: "76530c0d-9597-4099-b9c3-f375bd12b26c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.748977 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/76530c0d-9597-4099-b9c3-f375bd12b26c-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.751915 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76530c0d-9597-4099-b9c3-f375bd12b26c-kube-api-access-fhrsr" (OuterVolumeSpecName: "kube-api-access-fhrsr") pod "76530c0d-9597-4099-b9c3-f375bd12b26c" (UID: "76530c0d-9597-4099-b9c3-f375bd12b26c"). InnerVolumeSpecName "kube-api-access-fhrsr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.770737 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76530c0d-9597-4099-b9c3-f375bd12b26c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "76530c0d-9597-4099-b9c3-f375bd12b26c" (UID: "76530c0d-9597-4099-b9c3-f375bd12b26c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.849456 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08ce8375-3a53-43e6-a7c1-3ce32698965d-catalog-content\") pod \"08ce8375-3a53-43e6-a7c1-3ce32698965d\" (UID: \"08ce8375-3a53-43e6-a7c1-3ce32698965d\") " Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.850034 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v6llc\" (UniqueName: \"kubernetes.io/projected/08ce8375-3a53-43e6-a7c1-3ce32698965d-kube-api-access-v6llc\") pod \"08ce8375-3a53-43e6-a7c1-3ce32698965d\" (UID: \"08ce8375-3a53-43e6-a7c1-3ce32698965d\") " Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.850638 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08ce8375-3a53-43e6-a7c1-3ce32698965d-utilities\") pod \"08ce8375-3a53-43e6-a7c1-3ce32698965d\" (UID: \"08ce8375-3a53-43e6-a7c1-3ce32698965d\") " Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.850737 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08ce8375-3a53-43e6-a7c1-3ce32698965d-utilities" (OuterVolumeSpecName: "utilities") pod "08ce8375-3a53-43e6-a7c1-3ce32698965d" (UID: "08ce8375-3a53-43e6-a7c1-3ce32698965d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.851393 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fhrsr\" (UniqueName: \"kubernetes.io/projected/76530c0d-9597-4099-b9c3-f375bd12b26c-kube-api-access-fhrsr\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.851413 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08ce8375-3a53-43e6-a7c1-3ce32698965d-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.851426 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/76530c0d-9597-4099-b9c3-f375bd12b26c-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.853780 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08ce8375-3a53-43e6-a7c1-3ce32698965d-kube-api-access-v6llc" (OuterVolumeSpecName: "kube-api-access-v6llc") pod "08ce8375-3a53-43e6-a7c1-3ce32698965d" (UID: "08ce8375-3a53-43e6-a7c1-3ce32698965d"). InnerVolumeSpecName "kube-api-access-v6llc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.953361 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v6llc\" (UniqueName: \"kubernetes.io/projected/08ce8375-3a53-43e6-a7c1-3ce32698965d-kube-api-access-v6llc\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:15 crc kubenswrapper[4829]: I0122 00:12:15.970722 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08ce8375-3a53-43e6-a7c1-3ce32698965d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "08ce8375-3a53-43e6-a7c1-3ce32698965d" (UID: "08ce8375-3a53-43e6-a7c1-3ce32698965d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.054494 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08ce8375-3a53-43e6-a7c1-3ce32698965d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.060893 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-w7dnn" event={"ID":"2dcde760-e466-49aa-a092-1385876196ef","Type":"ContainerDied","Data":"439d5dc1d516cdde09c349d7ad153ba4049fda1218d57d75bbae72bd66dc0f41"} Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.060922 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-w7dnn" Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.060957 4829 scope.go:117] "RemoveContainer" containerID="9375d371ad43f857534c32794260bdb25ee1236731ac7890ac1f7f40c3adacb4" Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.064240 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bnqwm" event={"ID":"76530c0d-9597-4099-b9c3-f375bd12b26c","Type":"ContainerDied","Data":"dcdde47aa6ff1f296b8d94867a012040a5905344d3c78a1c00d99d3208fdad81"} Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.064251 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bnqwm" Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.067125 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tw5xj" event={"ID":"622d8532-4cca-4f15-972e-373735e2a5c1","Type":"ContainerDied","Data":"596ea893f59b47533e844e2f055e9b849309e579c1fb69da23f34769af27fdde"} Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.067203 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tw5xj" Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.068929 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hp7g2" Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.068927 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hp7g2" event={"ID":"4802cff4-7d5d-4af4-9ae0-7816748f46b3","Type":"ContainerDied","Data":"96cf8cb0530c5093268e4a53d648da6485d40abc0d8f45d7a553dc323f4d59d9"} Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.074032 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6pznq" event={"ID":"08ce8375-3a53-43e6-a7c1-3ce32698965d","Type":"ContainerDied","Data":"016f3fa5e913461945b1f5cf11f1639ada0f54134a0b9795df89c831fd8e6e29"} Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.074088 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6pznq" Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.078787 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-k5z2d" event={"ID":"61cad9e2-128b-4059-b8cb-7b024ecb5ce3","Type":"ContainerStarted","Data":"5dd70941342af08edf69a0c69651998f4e4bbe13cd75219a22cc0ba99610aa21"} Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.078832 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-k5z2d" event={"ID":"61cad9e2-128b-4059-b8cb-7b024ecb5ce3","Type":"ContainerStarted","Data":"00aee3362c50ef3f27b69749605cf3ed8dfa89aa271d5c40bdaf7732a5a839fe"} Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.079105 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-k5z2d" Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.088016 4829 scope.go:117] "RemoveContainer" containerID="9336e3dd3ba874f45733d4b6432036581d038d10ce4342db354d0d771e0dc5d8" Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.091631 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-k5z2d" Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.100381 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-k5z2d" podStartSLOduration=2.100364653 podStartE2EDuration="2.100364653s" podCreationTimestamp="2026-01-22 00:12:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:12:16.096481687 +0000 UTC m=+314.132723609" watchObservedRunningTime="2026-01-22 00:12:16.100364653 +0000 UTC m=+314.136606565" Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.131526 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-w7dnn"] Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.146793 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-w7dnn"] Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.147703 4829 scope.go:117] "RemoveContainer" containerID="2f14618f7a12c46c26c468c1e311019e55b930a614715c7a2f150b3cc2102196" Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.170221 4829 scope.go:117] "RemoveContainer" containerID="1c986d433a96ac6f44c57098d21ecb2d1ead18c25957402b311df1be01c91656" Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.185238 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6pznq"] Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.195081 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6pznq"] Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.210766 4829 scope.go:117] "RemoveContainer" containerID="ac7aeb14f4b7015d809edb9e5bfa876a6ab29ca5609b2c4d24c2ace9878b54cf" Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.212685 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bnqwm"] Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.220138 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-bnqwm"] Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 
00:12:16.225531 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tw5xj"] Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.229093 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-tw5xj"] Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.232277 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hp7g2"] Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.236842 4829 scope.go:117] "RemoveContainer" containerID="5aac395d0f5563af6ede521071dd5fca4f388def893bcc301c03a1e93638cdc6" Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.237673 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hp7g2"] Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.251028 4829 scope.go:117] "RemoveContainer" containerID="97af89874a5fdaeb3864694d13cde484d690d091e5b369556f1ac47ecf82bb57" Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.262231 4829 scope.go:117] "RemoveContainer" containerID="a53a2a77947444deb73d3c0b07e3220b8da23f3051b4b81ae631065af994b89c" Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.273302 4829 scope.go:117] "RemoveContainer" containerID="1e8b407a1c7e3a5260d4beabaec5afa621a87ce0e49250b7dbc699abbcfca79f" Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.285874 4829 scope.go:117] "RemoveContainer" containerID="be6289065fa01ac9048c6f859a36863b9e631091b3ff679cac01d94d4d0c6496" Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.298209 4829 scope.go:117] "RemoveContainer" containerID="17cef235973365c3cc1b0035da5eb414344c8f89bce8f0811b27638a88c9bba0" Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.309909 4829 scope.go:117] "RemoveContainer" containerID="5ea9aedf502e1d75ffa38059f395d4fd49a1e52df50beb7f72a2b42ad8bed88d" Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.324775 4829 scope.go:117] "RemoveContainer" containerID="c3be5b291940743def002ca77afc0710f2e9686dc720ae4583e50bda28bea922" Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.559053 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08ce8375-3a53-43e6-a7c1-3ce32698965d" path="/var/lib/kubelet/pods/08ce8375-3a53-43e6-a7c1-3ce32698965d/volumes" Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.559679 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2dcde760-e466-49aa-a092-1385876196ef" path="/var/lib/kubelet/pods/2dcde760-e466-49aa-a092-1385876196ef/volumes" Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.560098 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4802cff4-7d5d-4af4-9ae0-7816748f46b3" path="/var/lib/kubelet/pods/4802cff4-7d5d-4af4-9ae0-7816748f46b3/volumes" Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.561067 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="622d8532-4cca-4f15-972e-373735e2a5c1" path="/var/lib/kubelet/pods/622d8532-4cca-4f15-972e-373735e2a5c1/volumes" Jan 22 00:12:16 crc kubenswrapper[4829]: I0122 00:12:16.561620 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76530c0d-9597-4099-b9c3-f375bd12b26c" path="/var/lib/kubelet/pods/76530c0d-9597-4099-b9c3-f375bd12b26c/volumes" Jan 22 00:12:21 crc kubenswrapper[4829]: I0122 00:12:21.575277 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w"] Jan 22 00:12:21 crc 
kubenswrapper[4829]: I0122 00:12:21.575815 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w" podUID="9b59157d-3368-4c08-b2c2-22d6cf19e0fc" containerName="controller-manager" containerID="cri-o://5977512327a76843f9c2cd203a229cb74018957ea37c0b6af07222ae2819865d" gracePeriod=30 Jan 22 00:12:21 crc kubenswrapper[4829]: I0122 00:12:21.678958 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt"] Jan 22 00:12:21 crc kubenswrapper[4829]: I0122 00:12:21.679477 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt" podUID="17f14648-938f-4bad-aab2-7d733bd29059" containerName="route-controller-manager" containerID="cri-o://f57d688dc66b4b357f3ed506a02af6af2d3213fe1f131b3c0cb451e8850bf4b6" gracePeriod=30 Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.119810 4829 generic.go:334] "Generic (PLEG): container finished" podID="17f14648-938f-4bad-aab2-7d733bd29059" containerID="f57d688dc66b4b357f3ed506a02af6af2d3213fe1f131b3c0cb451e8850bf4b6" exitCode=0 Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.119894 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt" event={"ID":"17f14648-938f-4bad-aab2-7d733bd29059","Type":"ContainerDied","Data":"f57d688dc66b4b357f3ed506a02af6af2d3213fe1f131b3c0cb451e8850bf4b6"} Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.121457 4829 generic.go:334] "Generic (PLEG): container finished" podID="9b59157d-3368-4c08-b2c2-22d6cf19e0fc" containerID="5977512327a76843f9c2cd203a229cb74018957ea37c0b6af07222ae2819865d" exitCode=0 Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.121481 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w" event={"ID":"9b59157d-3368-4c08-b2c2-22d6cf19e0fc","Type":"ContainerDied","Data":"5977512327a76843f9c2cd203a229cb74018957ea37c0b6af07222ae2819865d"} Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.443481 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w" Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.532322 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt" Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.537724 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-serving-cert\") pod \"9b59157d-3368-4c08-b2c2-22d6cf19e0fc\" (UID: \"9b59157d-3368-4c08-b2c2-22d6cf19e0fc\") " Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.537842 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-proxy-ca-bundles\") pod \"9b59157d-3368-4c08-b2c2-22d6cf19e0fc\" (UID: \"9b59157d-3368-4c08-b2c2-22d6cf19e0fc\") " Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.537887 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-client-ca\") pod \"9b59157d-3368-4c08-b2c2-22d6cf19e0fc\" (UID: \"9b59157d-3368-4c08-b2c2-22d6cf19e0fc\") " Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.537930 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hq7p9\" (UniqueName: \"kubernetes.io/projected/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-kube-api-access-hq7p9\") pod \"9b59157d-3368-4c08-b2c2-22d6cf19e0fc\" (UID: \"9b59157d-3368-4c08-b2c2-22d6cf19e0fc\") " Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.538021 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-config\") pod \"9b59157d-3368-4c08-b2c2-22d6cf19e0fc\" (UID: \"9b59157d-3368-4c08-b2c2-22d6cf19e0fc\") " Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.538635 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-client-ca" (OuterVolumeSpecName: "client-ca") pod "9b59157d-3368-4c08-b2c2-22d6cf19e0fc" (UID: "9b59157d-3368-4c08-b2c2-22d6cf19e0fc"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.538651 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "9b59157d-3368-4c08-b2c2-22d6cf19e0fc" (UID: "9b59157d-3368-4c08-b2c2-22d6cf19e0fc"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.539289 4829 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.539316 4829 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.539315 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-config" (OuterVolumeSpecName: "config") pod "9b59157d-3368-4c08-b2c2-22d6cf19e0fc" (UID: "9b59157d-3368-4c08-b2c2-22d6cf19e0fc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.545600 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-kube-api-access-hq7p9" (OuterVolumeSpecName: "kube-api-access-hq7p9") pod "9b59157d-3368-4c08-b2c2-22d6cf19e0fc" (UID: "9b59157d-3368-4c08-b2c2-22d6cf19e0fc"). InnerVolumeSpecName "kube-api-access-hq7p9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.545615 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9b59157d-3368-4c08-b2c2-22d6cf19e0fc" (UID: "9b59157d-3368-4c08-b2c2-22d6cf19e0fc"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.640558 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/17f14648-938f-4bad-aab2-7d733bd29059-client-ca\") pod \"17f14648-938f-4bad-aab2-7d733bd29059\" (UID: \"17f14648-938f-4bad-aab2-7d733bd29059\") " Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.640656 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcw9h\" (UniqueName: \"kubernetes.io/projected/17f14648-938f-4bad-aab2-7d733bd29059-kube-api-access-fcw9h\") pod \"17f14648-938f-4bad-aab2-7d733bd29059\" (UID: \"17f14648-938f-4bad-aab2-7d733bd29059\") " Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.640734 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17f14648-938f-4bad-aab2-7d733bd29059-serving-cert\") pod \"17f14648-938f-4bad-aab2-7d733bd29059\" (UID: \"17f14648-938f-4bad-aab2-7d733bd29059\") " Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.640774 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17f14648-938f-4bad-aab2-7d733bd29059-config\") pod \"17f14648-938f-4bad-aab2-7d733bd29059\" (UID: \"17f14648-938f-4bad-aab2-7d733bd29059\") " Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.641118 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hq7p9\" (UniqueName: \"kubernetes.io/projected/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-kube-api-access-hq7p9\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.641136 4829 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.641150 4829 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9b59157d-3368-4c08-b2c2-22d6cf19e0fc-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.641626 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17f14648-938f-4bad-aab2-7d733bd29059-client-ca" (OuterVolumeSpecName: "client-ca") pod "17f14648-938f-4bad-aab2-7d733bd29059" (UID: "17f14648-938f-4bad-aab2-7d733bd29059"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.641635 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17f14648-938f-4bad-aab2-7d733bd29059-config" (OuterVolumeSpecName: "config") pod "17f14648-938f-4bad-aab2-7d733bd29059" (UID: "17f14648-938f-4bad-aab2-7d733bd29059"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.644129 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17f14648-938f-4bad-aab2-7d733bd29059-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "17f14648-938f-4bad-aab2-7d733bd29059" (UID: "17f14648-938f-4bad-aab2-7d733bd29059"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.645565 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17f14648-938f-4bad-aab2-7d733bd29059-kube-api-access-fcw9h" (OuterVolumeSpecName: "kube-api-access-fcw9h") pod "17f14648-938f-4bad-aab2-7d733bd29059" (UID: "17f14648-938f-4bad-aab2-7d733bd29059"). InnerVolumeSpecName "kube-api-access-fcw9h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.742489 4829 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/17f14648-938f-4bad-aab2-7d733bd29059-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.742869 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcw9h\" (UniqueName: \"kubernetes.io/projected/17f14648-938f-4bad-aab2-7d733bd29059-kube-api-access-fcw9h\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.742885 4829 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17f14648-938f-4bad-aab2-7d733bd29059-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:22 crc kubenswrapper[4829]: I0122 00:12:22.743241 4829 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17f14648-938f-4bad-aab2-7d733bd29059-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.073670 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-79d8647b8b-lmfv9"] Jan 22 00:12:23 crc kubenswrapper[4829]: E0122 00:12:23.074032 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76530c0d-9597-4099-b9c3-f375bd12b26c" containerName="extract-content" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.074051 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="76530c0d-9597-4099-b9c3-f375bd12b26c" containerName="extract-content" Jan 22 00:12:23 crc kubenswrapper[4829]: E0122 00:12:23.074078 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76530c0d-9597-4099-b9c3-f375bd12b26c" containerName="extract-utilities" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.074087 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="76530c0d-9597-4099-b9c3-f375bd12b26c" containerName="extract-utilities" Jan 22 00:12:23 crc kubenswrapper[4829]: E0122 00:12:23.074095 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17f14648-938f-4bad-aab2-7d733bd29059" containerName="route-controller-manager" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.074101 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="17f14648-938f-4bad-aab2-7d733bd29059" containerName="route-controller-manager" Jan 22 00:12:23 crc kubenswrapper[4829]: E0122 00:12:23.074111 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08ce8375-3a53-43e6-a7c1-3ce32698965d" containerName="registry-server" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.074117 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="08ce8375-3a53-43e6-a7c1-3ce32698965d" containerName="registry-server" Jan 22 00:12:23 crc kubenswrapper[4829]: E0122 00:12:23.074124 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4802cff4-7d5d-4af4-9ae0-7816748f46b3" 
containerName="extract-utilities" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.074130 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="4802cff4-7d5d-4af4-9ae0-7816748f46b3" containerName="extract-utilities" Jan 22 00:12:23 crc kubenswrapper[4829]: E0122 00:12:23.074138 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2dcde760-e466-49aa-a092-1385876196ef" containerName="marketplace-operator" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.074143 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="2dcde760-e466-49aa-a092-1385876196ef" containerName="marketplace-operator" Jan 22 00:12:23 crc kubenswrapper[4829]: E0122 00:12:23.074150 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4802cff4-7d5d-4af4-9ae0-7816748f46b3" containerName="extract-content" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.074156 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="4802cff4-7d5d-4af4-9ae0-7816748f46b3" containerName="extract-content" Jan 22 00:12:23 crc kubenswrapper[4829]: E0122 00:12:23.074162 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08ce8375-3a53-43e6-a7c1-3ce32698965d" containerName="extract-content" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.074168 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="08ce8375-3a53-43e6-a7c1-3ce32698965d" containerName="extract-content" Jan 22 00:12:23 crc kubenswrapper[4829]: E0122 00:12:23.074175 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="622d8532-4cca-4f15-972e-373735e2a5c1" containerName="extract-content" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.074181 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="622d8532-4cca-4f15-972e-373735e2a5c1" containerName="extract-content" Jan 22 00:12:23 crc kubenswrapper[4829]: E0122 00:12:23.074191 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4802cff4-7d5d-4af4-9ae0-7816748f46b3" containerName="registry-server" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.074197 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="4802cff4-7d5d-4af4-9ae0-7816748f46b3" containerName="registry-server" Jan 22 00:12:23 crc kubenswrapper[4829]: E0122 00:12:23.074204 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="622d8532-4cca-4f15-972e-373735e2a5c1" containerName="registry-server" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.074210 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="622d8532-4cca-4f15-972e-373735e2a5c1" containerName="registry-server" Jan 22 00:12:23 crc kubenswrapper[4829]: E0122 00:12:23.074219 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76530c0d-9597-4099-b9c3-f375bd12b26c" containerName="registry-server" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.074224 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="76530c0d-9597-4099-b9c3-f375bd12b26c" containerName="registry-server" Jan 22 00:12:23 crc kubenswrapper[4829]: E0122 00:12:23.074231 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08ce8375-3a53-43e6-a7c1-3ce32698965d" containerName="extract-utilities" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.074237 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="08ce8375-3a53-43e6-a7c1-3ce32698965d" containerName="extract-utilities" Jan 22 00:12:23 crc kubenswrapper[4829]: E0122 00:12:23.074246 4829 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="622d8532-4cca-4f15-972e-373735e2a5c1" containerName="extract-utilities" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.074251 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="622d8532-4cca-4f15-972e-373735e2a5c1" containerName="extract-utilities" Jan 22 00:12:23 crc kubenswrapper[4829]: E0122 00:12:23.074261 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b59157d-3368-4c08-b2c2-22d6cf19e0fc" containerName="controller-manager" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.074268 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b59157d-3368-4c08-b2c2-22d6cf19e0fc" containerName="controller-manager" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.074350 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="08ce8375-3a53-43e6-a7c1-3ce32698965d" containerName="registry-server" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.074361 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="17f14648-938f-4bad-aab2-7d733bd29059" containerName="route-controller-manager" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.074368 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="76530c0d-9597-4099-b9c3-f375bd12b26c" containerName="registry-server" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.074375 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="4802cff4-7d5d-4af4-9ae0-7816748f46b3" containerName="registry-server" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.074382 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="622d8532-4cca-4f15-972e-373735e2a5c1" containerName="registry-server" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.074390 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="2dcde760-e466-49aa-a092-1385876196ef" containerName="marketplace-operator" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.074402 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b59157d-3368-4c08-b2c2-22d6cf19e0fc" containerName="controller-manager" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.075812 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-79d8647b8b-lmfv9" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.078261 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w"] Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.079208 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.083110 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-79d8647b8b-lmfv9"] Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.090442 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w"] Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.128912 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.132271 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt" event={"ID":"17f14648-938f-4bad-aab2-7d733bd29059","Type":"ContainerDied","Data":"861b6a1300771a3adcb3ec277f7b6a601186d1a73dfd4bc052214150fd4771e2"} Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.132322 4829 scope.go:117] "RemoveContainer" containerID="f57d688dc66b4b357f3ed506a02af6af2d3213fe1f131b3c0cb451e8850bf4b6" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.142285 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w" event={"ID":"9b59157d-3368-4c08-b2c2-22d6cf19e0fc","Type":"ContainerDied","Data":"4f1fb19a4ac9baaa8ef4f01a832092a092558737dc04fa7bb7fbbf68f917a1e5"} Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.142339 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.148588 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvpjz\" (UniqueName: \"kubernetes.io/projected/e69d04d0-ec93-4501-8d79-f19e886a50b2-kube-api-access-nvpjz\") pod \"controller-manager-79d8647b8b-lmfv9\" (UID: \"e69d04d0-ec93-4501-8d79-f19e886a50b2\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-lmfv9" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.148671 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/94cfa186-41f8-4320-a524-cacaee1fe270-serving-cert\") pod \"route-controller-manager-bb6b49549-65g4w\" (UID: \"94cfa186-41f8-4320-a524-cacaee1fe270\") " pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.148794 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sfkjv\" (UniqueName: \"kubernetes.io/projected/94cfa186-41f8-4320-a524-cacaee1fe270-kube-api-access-sfkjv\") pod \"route-controller-manager-bb6b49549-65g4w\" (UID: \"94cfa186-41f8-4320-a524-cacaee1fe270\") " pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.148830 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e69d04d0-ec93-4501-8d79-f19e886a50b2-proxy-ca-bundles\") pod \"controller-manager-79d8647b8b-lmfv9\" (UID: \"e69d04d0-ec93-4501-8d79-f19e886a50b2\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-lmfv9" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.148991 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e69d04d0-ec93-4501-8d79-f19e886a50b2-client-ca\") pod \"controller-manager-79d8647b8b-lmfv9\" (UID: \"e69d04d0-ec93-4501-8d79-f19e886a50b2\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-lmfv9" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.149022 4829 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/94cfa186-41f8-4320-a524-cacaee1fe270-client-ca\") pod \"route-controller-manager-bb6b49549-65g4w\" (UID: \"94cfa186-41f8-4320-a524-cacaee1fe270\") " pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.149104 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e69d04d0-ec93-4501-8d79-f19e886a50b2-serving-cert\") pod \"controller-manager-79d8647b8b-lmfv9\" (UID: \"e69d04d0-ec93-4501-8d79-f19e886a50b2\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-lmfv9" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.149184 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94cfa186-41f8-4320-a524-cacaee1fe270-config\") pod \"route-controller-manager-bb6b49549-65g4w\" (UID: \"94cfa186-41f8-4320-a524-cacaee1fe270\") " pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.149223 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e69d04d0-ec93-4501-8d79-f19e886a50b2-config\") pod \"controller-manager-79d8647b8b-lmfv9\" (UID: \"e69d04d0-ec93-4501-8d79-f19e886a50b2\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-lmfv9" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.158939 4829 scope.go:117] "RemoveContainer" containerID="5977512327a76843f9c2cd203a229cb74018957ea37c0b6af07222ae2819865d" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.162741 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w"] Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.174574 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7c9d5cf4bd-jvh4w"] Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.181329 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt"] Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.185411 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-d57fd7544-r2hnt"] Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.250081 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sfkjv\" (UniqueName: \"kubernetes.io/projected/94cfa186-41f8-4320-a524-cacaee1fe270-kube-api-access-sfkjv\") pod \"route-controller-manager-bb6b49549-65g4w\" (UID: \"94cfa186-41f8-4320-a524-cacaee1fe270\") " pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.250160 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e69d04d0-ec93-4501-8d79-f19e886a50b2-proxy-ca-bundles\") pod \"controller-manager-79d8647b8b-lmfv9\" (UID: \"e69d04d0-ec93-4501-8d79-f19e886a50b2\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-lmfv9" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.250194 
4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e69d04d0-ec93-4501-8d79-f19e886a50b2-client-ca\") pod \"controller-manager-79d8647b8b-lmfv9\" (UID: \"e69d04d0-ec93-4501-8d79-f19e886a50b2\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-lmfv9" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.250292 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/94cfa186-41f8-4320-a524-cacaee1fe270-client-ca\") pod \"route-controller-manager-bb6b49549-65g4w\" (UID: \"94cfa186-41f8-4320-a524-cacaee1fe270\") " pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.251554 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e69d04d0-ec93-4501-8d79-f19e886a50b2-client-ca\") pod \"controller-manager-79d8647b8b-lmfv9\" (UID: \"e69d04d0-ec93-4501-8d79-f19e886a50b2\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-lmfv9" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.251580 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e69d04d0-ec93-4501-8d79-f19e886a50b2-proxy-ca-bundles\") pod \"controller-manager-79d8647b8b-lmfv9\" (UID: \"e69d04d0-ec93-4501-8d79-f19e886a50b2\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-lmfv9" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.251553 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/94cfa186-41f8-4320-a524-cacaee1fe270-client-ca\") pod \"route-controller-manager-bb6b49549-65g4w\" (UID: \"94cfa186-41f8-4320-a524-cacaee1fe270\") " pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.251638 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e69d04d0-ec93-4501-8d79-f19e886a50b2-serving-cert\") pod \"controller-manager-79d8647b8b-lmfv9\" (UID: \"e69d04d0-ec93-4501-8d79-f19e886a50b2\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-lmfv9" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.251708 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94cfa186-41f8-4320-a524-cacaee1fe270-config\") pod \"route-controller-manager-bb6b49549-65g4w\" (UID: \"94cfa186-41f8-4320-a524-cacaee1fe270\") " pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.251759 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e69d04d0-ec93-4501-8d79-f19e886a50b2-config\") pod \"controller-manager-79d8647b8b-lmfv9\" (UID: \"e69d04d0-ec93-4501-8d79-f19e886a50b2\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-lmfv9" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.251800 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvpjz\" (UniqueName: \"kubernetes.io/projected/e69d04d0-ec93-4501-8d79-f19e886a50b2-kube-api-access-nvpjz\") pod 
\"controller-manager-79d8647b8b-lmfv9\" (UID: \"e69d04d0-ec93-4501-8d79-f19e886a50b2\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-lmfv9" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.251842 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/94cfa186-41f8-4320-a524-cacaee1fe270-serving-cert\") pod \"route-controller-manager-bb6b49549-65g4w\" (UID: \"94cfa186-41f8-4320-a524-cacaee1fe270\") " pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.254510 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e69d04d0-ec93-4501-8d79-f19e886a50b2-config\") pod \"controller-manager-79d8647b8b-lmfv9\" (UID: \"e69d04d0-ec93-4501-8d79-f19e886a50b2\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-lmfv9" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.254757 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94cfa186-41f8-4320-a524-cacaee1fe270-config\") pod \"route-controller-manager-bb6b49549-65g4w\" (UID: \"94cfa186-41f8-4320-a524-cacaee1fe270\") " pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.258080 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/94cfa186-41f8-4320-a524-cacaee1fe270-serving-cert\") pod \"route-controller-manager-bb6b49549-65g4w\" (UID: \"94cfa186-41f8-4320-a524-cacaee1fe270\") " pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.258127 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e69d04d0-ec93-4501-8d79-f19e886a50b2-serving-cert\") pod \"controller-manager-79d8647b8b-lmfv9\" (UID: \"e69d04d0-ec93-4501-8d79-f19e886a50b2\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-lmfv9" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.274914 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sfkjv\" (UniqueName: \"kubernetes.io/projected/94cfa186-41f8-4320-a524-cacaee1fe270-kube-api-access-sfkjv\") pod \"route-controller-manager-bb6b49549-65g4w\" (UID: \"94cfa186-41f8-4320-a524-cacaee1fe270\") " pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.283251 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvpjz\" (UniqueName: \"kubernetes.io/projected/e69d04d0-ec93-4501-8d79-f19e886a50b2-kube-api-access-nvpjz\") pod \"controller-manager-79d8647b8b-lmfv9\" (UID: \"e69d04d0-ec93-4501-8d79-f19e886a50b2\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-lmfv9" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.394447 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-79d8647b8b-lmfv9" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.404408 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w" Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.595129 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w"] Jan 22 00:12:23 crc kubenswrapper[4829]: I0122 00:12:23.828836 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-79d8647b8b-lmfv9"] Jan 22 00:12:24 crc kubenswrapper[4829]: I0122 00:12:24.149709 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w" event={"ID":"94cfa186-41f8-4320-a524-cacaee1fe270","Type":"ContainerStarted","Data":"cddb759710663edc63e3c0948be491236acde8ecc6649074195d5e144b5bc4de"} Jan 22 00:12:24 crc kubenswrapper[4829]: I0122 00:12:24.149763 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w" event={"ID":"94cfa186-41f8-4320-a524-cacaee1fe270","Type":"ContainerStarted","Data":"cc3c243b6041c776a00122b6f863c107f0d18b0d912ca24e2dc9552bba3e9cd1"} Jan 22 00:12:24 crc kubenswrapper[4829]: I0122 00:12:24.150019 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w" Jan 22 00:12:24 crc kubenswrapper[4829]: I0122 00:12:24.152100 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-79d8647b8b-lmfv9" event={"ID":"e69d04d0-ec93-4501-8d79-f19e886a50b2","Type":"ContainerStarted","Data":"4595e63b6cfb7c51513a32a30bd8486c0d9d8d7f09c69d73d431c219440fe1d7"} Jan 22 00:12:24 crc kubenswrapper[4829]: I0122 00:12:24.153578 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-79d8647b8b-lmfv9" event={"ID":"e69d04d0-ec93-4501-8d79-f19e886a50b2","Type":"ContainerStarted","Data":"3ce8beca2efc0fec02970fd08ffd5d5dd708176031a1c64214fd10df1875d261"} Jan 22 00:12:24 crc kubenswrapper[4829]: I0122 00:12:24.153608 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-79d8647b8b-lmfv9" Jan 22 00:12:24 crc kubenswrapper[4829]: I0122 00:12:24.157864 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-79d8647b8b-lmfv9" Jan 22 00:12:24 crc kubenswrapper[4829]: I0122 00:12:24.172316 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w" podStartSLOduration=3.172289874 podStartE2EDuration="3.172289874s" podCreationTimestamp="2026-01-22 00:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:12:24.170046776 +0000 UTC m=+322.206288698" watchObservedRunningTime="2026-01-22 00:12:24.172289874 +0000 UTC m=+322.208531806" Jan 22 00:12:24 crc kubenswrapper[4829]: I0122 00:12:24.187954 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w" Jan 22 00:12:24 crc kubenswrapper[4829]: I0122 00:12:24.193732 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-79d8647b8b-lmfv9" 
podStartSLOduration=3.193711189 podStartE2EDuration="3.193711189s" podCreationTimestamp="2026-01-22 00:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:12:24.190398524 +0000 UTC m=+322.226640436" watchObservedRunningTime="2026-01-22 00:12:24.193711189 +0000 UTC m=+322.229953121" Jan 22 00:12:24 crc kubenswrapper[4829]: I0122 00:12:24.568306 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17f14648-938f-4bad-aab2-7d733bd29059" path="/var/lib/kubelet/pods/17f14648-938f-4bad-aab2-7d733bd29059/volumes" Jan 22 00:12:24 crc kubenswrapper[4829]: I0122 00:12:24.569019 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b59157d-3368-4c08-b2c2-22d6cf19e0fc" path="/var/lib/kubelet/pods/9b59157d-3368-4c08-b2c2-22d6cf19e0fc/volumes" Jan 22 00:12:41 crc kubenswrapper[4829]: I0122 00:12:41.615971 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-79d8647b8b-lmfv9"] Jan 22 00:12:41 crc kubenswrapper[4829]: I0122 00:12:41.616563 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-79d8647b8b-lmfv9" podUID="e69d04d0-ec93-4501-8d79-f19e886a50b2" containerName="controller-manager" containerID="cri-o://4595e63b6cfb7c51513a32a30bd8486c0d9d8d7f09c69d73d431c219440fe1d7" gracePeriod=30 Jan 22 00:12:41 crc kubenswrapper[4829]: I0122 00:12:41.648265 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w"] Jan 22 00:12:41 crc kubenswrapper[4829]: I0122 00:12:41.648567 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w" podUID="94cfa186-41f8-4320-a524-cacaee1fe270" containerName="route-controller-manager" containerID="cri-o://cddb759710663edc63e3c0948be491236acde8ecc6649074195d5e144b5bc4de" gracePeriod=30 Jan 22 00:12:42 crc kubenswrapper[4829]: I0122 00:12:42.261290 4829 generic.go:334] "Generic (PLEG): container finished" podID="94cfa186-41f8-4320-a524-cacaee1fe270" containerID="cddb759710663edc63e3c0948be491236acde8ecc6649074195d5e144b5bc4de" exitCode=0 Jan 22 00:12:42 crc kubenswrapper[4829]: I0122 00:12:42.261388 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w" event={"ID":"94cfa186-41f8-4320-a524-cacaee1fe270","Type":"ContainerDied","Data":"cddb759710663edc63e3c0948be491236acde8ecc6649074195d5e144b5bc4de"} Jan 22 00:12:42 crc kubenswrapper[4829]: I0122 00:12:42.263005 4829 generic.go:334] "Generic (PLEG): container finished" podID="e69d04d0-ec93-4501-8d79-f19e886a50b2" containerID="4595e63b6cfb7c51513a32a30bd8486c0d9d8d7f09c69d73d431c219440fe1d7" exitCode=0 Jan 22 00:12:42 crc kubenswrapper[4829]: I0122 00:12:42.263039 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-79d8647b8b-lmfv9" event={"ID":"e69d04d0-ec93-4501-8d79-f19e886a50b2","Type":"ContainerDied","Data":"4595e63b6cfb7c51513a32a30bd8486c0d9d8d7f09c69d73d431c219440fe1d7"} Jan 22 00:12:42 crc kubenswrapper[4829]: I0122 00:12:42.859758 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w" Jan 22 00:12:42 crc kubenswrapper[4829]: I0122 00:12:42.902766 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5"] Jan 22 00:12:42 crc kubenswrapper[4829]: E0122 00:12:42.903069 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94cfa186-41f8-4320-a524-cacaee1fe270" containerName="route-controller-manager" Jan 22 00:12:42 crc kubenswrapper[4829]: I0122 00:12:42.903090 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="94cfa186-41f8-4320-a524-cacaee1fe270" containerName="route-controller-manager" Jan 22 00:12:42 crc kubenswrapper[4829]: I0122 00:12:42.903249 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="94cfa186-41f8-4320-a524-cacaee1fe270" containerName="route-controller-manager" Jan 22 00:12:42 crc kubenswrapper[4829]: I0122 00:12:42.903716 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5" Jan 22 00:12:42 crc kubenswrapper[4829]: I0122 00:12:42.909062 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5"] Jan 22 00:12:42 crc kubenswrapper[4829]: I0122 00:12:42.997212 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-79d8647b8b-lmfv9" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.024144 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/94cfa186-41f8-4320-a524-cacaee1fe270-serving-cert\") pod \"94cfa186-41f8-4320-a524-cacaee1fe270\" (UID: \"94cfa186-41f8-4320-a524-cacaee1fe270\") " Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.024194 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/94cfa186-41f8-4320-a524-cacaee1fe270-client-ca\") pod \"94cfa186-41f8-4320-a524-cacaee1fe270\" (UID: \"94cfa186-41f8-4320-a524-cacaee1fe270\") " Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.024279 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sfkjv\" (UniqueName: \"kubernetes.io/projected/94cfa186-41f8-4320-a524-cacaee1fe270-kube-api-access-sfkjv\") pod \"94cfa186-41f8-4320-a524-cacaee1fe270\" (UID: \"94cfa186-41f8-4320-a524-cacaee1fe270\") " Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.024331 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94cfa186-41f8-4320-a524-cacaee1fe270-config\") pod \"94cfa186-41f8-4320-a524-cacaee1fe270\" (UID: \"94cfa186-41f8-4320-a524-cacaee1fe270\") " Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.024520 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e4fe0441-5159-457b-b6c7-0ffd168435f2-serving-cert\") pod \"route-controller-manager-66d9b996-tcgh5\" (UID: \"e4fe0441-5159-457b-b6c7-0ffd168435f2\") " pod="openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.024563 4829 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4fe0441-5159-457b-b6c7-0ffd168435f2-config\") pod \"route-controller-manager-66d9b996-tcgh5\" (UID: \"e4fe0441-5159-457b-b6c7-0ffd168435f2\") " pod="openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.024649 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e4fe0441-5159-457b-b6c7-0ffd168435f2-client-ca\") pod \"route-controller-manager-66d9b996-tcgh5\" (UID: \"e4fe0441-5159-457b-b6c7-0ffd168435f2\") " pod="openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.024681 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qh79\" (UniqueName: \"kubernetes.io/projected/e4fe0441-5159-457b-b6c7-0ffd168435f2-kube-api-access-2qh79\") pod \"route-controller-manager-66d9b996-tcgh5\" (UID: \"e4fe0441-5159-457b-b6c7-0ffd168435f2\") " pod="openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.025317 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94cfa186-41f8-4320-a524-cacaee1fe270-client-ca" (OuterVolumeSpecName: "client-ca") pod "94cfa186-41f8-4320-a524-cacaee1fe270" (UID: "94cfa186-41f8-4320-a524-cacaee1fe270"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.026216 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94cfa186-41f8-4320-a524-cacaee1fe270-config" (OuterVolumeSpecName: "config") pod "94cfa186-41f8-4320-a524-cacaee1fe270" (UID: "94cfa186-41f8-4320-a524-cacaee1fe270"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.029402 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94cfa186-41f8-4320-a524-cacaee1fe270-kube-api-access-sfkjv" (OuterVolumeSpecName: "kube-api-access-sfkjv") pod "94cfa186-41f8-4320-a524-cacaee1fe270" (UID: "94cfa186-41f8-4320-a524-cacaee1fe270"). InnerVolumeSpecName "kube-api-access-sfkjv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.029573 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94cfa186-41f8-4320-a524-cacaee1fe270-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "94cfa186-41f8-4320-a524-cacaee1fe270" (UID: "94cfa186-41f8-4320-a524-cacaee1fe270"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.126534 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e69d04d0-ec93-4501-8d79-f19e886a50b2-proxy-ca-bundles\") pod \"e69d04d0-ec93-4501-8d79-f19e886a50b2\" (UID: \"e69d04d0-ec93-4501-8d79-f19e886a50b2\") " Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.126675 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e69d04d0-ec93-4501-8d79-f19e886a50b2-config\") pod \"e69d04d0-ec93-4501-8d79-f19e886a50b2\" (UID: \"e69d04d0-ec93-4501-8d79-f19e886a50b2\") " Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.126756 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nvpjz\" (UniqueName: \"kubernetes.io/projected/e69d04d0-ec93-4501-8d79-f19e886a50b2-kube-api-access-nvpjz\") pod \"e69d04d0-ec93-4501-8d79-f19e886a50b2\" (UID: \"e69d04d0-ec93-4501-8d79-f19e886a50b2\") " Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.126879 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e69d04d0-ec93-4501-8d79-f19e886a50b2-serving-cert\") pod \"e69d04d0-ec93-4501-8d79-f19e886a50b2\" (UID: \"e69d04d0-ec93-4501-8d79-f19e886a50b2\") " Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.126949 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e69d04d0-ec93-4501-8d79-f19e886a50b2-client-ca\") pod \"e69d04d0-ec93-4501-8d79-f19e886a50b2\" (UID: \"e69d04d0-ec93-4501-8d79-f19e886a50b2\") " Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.127363 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e4fe0441-5159-457b-b6c7-0ffd168435f2-client-ca\") pod \"route-controller-manager-66d9b996-tcgh5\" (UID: \"e4fe0441-5159-457b-b6c7-0ffd168435f2\") " pod="openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.127424 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qh79\" (UniqueName: \"kubernetes.io/projected/e4fe0441-5159-457b-b6c7-0ffd168435f2-kube-api-access-2qh79\") pod \"route-controller-manager-66d9b996-tcgh5\" (UID: \"e4fe0441-5159-457b-b6c7-0ffd168435f2\") " pod="openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.127447 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e4fe0441-5159-457b-b6c7-0ffd168435f2-serving-cert\") pod \"route-controller-manager-66d9b996-tcgh5\" (UID: \"e4fe0441-5159-457b-b6c7-0ffd168435f2\") " pod="openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.127465 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4fe0441-5159-457b-b6c7-0ffd168435f2-config\") pod \"route-controller-manager-66d9b996-tcgh5\" (UID: \"e4fe0441-5159-457b-b6c7-0ffd168435f2\") " pod="openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5" Jan 22 00:12:43 crc 
kubenswrapper[4829]: I0122 00:12:43.128361 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e69d04d0-ec93-4501-8d79-f19e886a50b2-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "e69d04d0-ec93-4501-8d79-f19e886a50b2" (UID: "e69d04d0-ec93-4501-8d79-f19e886a50b2"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.128491 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e69d04d0-ec93-4501-8d79-f19e886a50b2-config" (OuterVolumeSpecName: "config") pod "e69d04d0-ec93-4501-8d79-f19e886a50b2" (UID: "e69d04d0-ec93-4501-8d79-f19e886a50b2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.128723 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e69d04d0-ec93-4501-8d79-f19e886a50b2-client-ca" (OuterVolumeSpecName: "client-ca") pod "e69d04d0-ec93-4501-8d79-f19e886a50b2" (UID: "e69d04d0-ec93-4501-8d79-f19e886a50b2"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.129181 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e4fe0441-5159-457b-b6c7-0ffd168435f2-client-ca\") pod \"route-controller-manager-66d9b996-tcgh5\" (UID: \"e4fe0441-5159-457b-b6c7-0ffd168435f2\") " pod="openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.129309 4829 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/94cfa186-41f8-4320-a524-cacaee1fe270-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.129327 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sfkjv\" (UniqueName: \"kubernetes.io/projected/94cfa186-41f8-4320-a524-cacaee1fe270-kube-api-access-sfkjv\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.129338 4829 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94cfa186-41f8-4320-a524-cacaee1fe270-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.129347 4829 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e69d04d0-ec93-4501-8d79-f19e886a50b2-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.129356 4829 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e69d04d0-ec93-4501-8d79-f19e886a50b2-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.129365 4829 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e69d04d0-ec93-4501-8d79-f19e886a50b2-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.129375 4829 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/94cfa186-41f8-4320-a524-cacaee1fe270-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:43 crc 
kubenswrapper[4829]: I0122 00:12:43.129488 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4fe0441-5159-457b-b6c7-0ffd168435f2-config\") pod \"route-controller-manager-66d9b996-tcgh5\" (UID: \"e4fe0441-5159-457b-b6c7-0ffd168435f2\") " pod="openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.130480 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e69d04d0-ec93-4501-8d79-f19e886a50b2-kube-api-access-nvpjz" (OuterVolumeSpecName: "kube-api-access-nvpjz") pod "e69d04d0-ec93-4501-8d79-f19e886a50b2" (UID: "e69d04d0-ec93-4501-8d79-f19e886a50b2"). InnerVolumeSpecName "kube-api-access-nvpjz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.130874 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e69d04d0-ec93-4501-8d79-f19e886a50b2-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e69d04d0-ec93-4501-8d79-f19e886a50b2" (UID: "e69d04d0-ec93-4501-8d79-f19e886a50b2"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.132929 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e4fe0441-5159-457b-b6c7-0ffd168435f2-serving-cert\") pod \"route-controller-manager-66d9b996-tcgh5\" (UID: \"e4fe0441-5159-457b-b6c7-0ffd168435f2\") " pod="openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.144861 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qh79\" (UniqueName: \"kubernetes.io/projected/e4fe0441-5159-457b-b6c7-0ffd168435f2-kube-api-access-2qh79\") pod \"route-controller-manager-66d9b996-tcgh5\" (UID: \"e4fe0441-5159-457b-b6c7-0ffd168435f2\") " pod="openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.230661 4829 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e69d04d0-ec93-4501-8d79-f19e886a50b2-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.230715 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nvpjz\" (UniqueName: \"kubernetes.io/projected/e69d04d0-ec93-4501-8d79-f19e886a50b2-kube-api-access-nvpjz\") on node \"crc\" DevicePath \"\"" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.230931 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.277428 4829 util.go:48] "No ready sandbox for pod can be found. 
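The reconciler entries above trace the usual volume life cycle for the replaced route-controller-manager pod: UnmountVolume.TearDown and "Volume detached" for the old UID 94cfa186-..., then VerifyControllerAttachedVolume and MountVolume.SetUp for the new UID e4fe0441-.... The sketch below is a minimal, hedged way to confirm the same state from outside the node with client-go; the kubeconfig path is an assumption, while the namespace, pod name, and volume names are taken from these log entries.

package main

import (
	"context"
	"fmt"
	"log"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Assumption: a kubeconfig at this path with access to the cluster that produced this log.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig")
	if err != nil {
		log.Fatal(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		log.Fatal(err)
	}

	// Namespace and pod name as they appear in the entries above.
	pod, err := cs.CoreV1().Pods("openshift-route-controller-manager").
		Get(context.TODO(), "route-controller-manager-66d9b996-tcgh5", metav1.GetOptions{})
	if err != nil {
		log.Fatal(err)
	}

	// The volumes listed here correspond to the MountVolume.SetUp entries:
	// client-ca, config, serving-cert, kube-api-access-2qh79.
	for _, v := range pod.Spec.Volumes {
		fmt.Println("volume:", v.Name)
	}
	// The Ready condition flips once the readiness probe reported by "SyncLoop (probe)" passes.
	for _, c := range pod.Status.Conditions {
		fmt.Printf("condition %s=%s\n", c.Type, c.Status)
	}
}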
Need to start a new one" pod="openshift-controller-manager/controller-manager-79d8647b8b-lmfv9" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.277418 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-79d8647b8b-lmfv9" event={"ID":"e69d04d0-ec93-4501-8d79-f19e886a50b2","Type":"ContainerDied","Data":"3ce8beca2efc0fec02970fd08ffd5d5dd708176031a1c64214fd10df1875d261"} Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.277528 4829 scope.go:117] "RemoveContainer" containerID="4595e63b6cfb7c51513a32a30bd8486c0d9d8d7f09c69d73d431c219440fe1d7" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.281551 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w" event={"ID":"94cfa186-41f8-4320-a524-cacaee1fe270","Type":"ContainerDied","Data":"cc3c243b6041c776a00122b6f863c107f0d18b0d912ca24e2dc9552bba3e9cd1"} Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.281676 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.308884 4829 scope.go:117] "RemoveContainer" containerID="cddb759710663edc63e3c0948be491236acde8ecc6649074195d5e144b5bc4de" Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.328608 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-79d8647b8b-lmfv9"] Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.343207 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-79d8647b8b-lmfv9"] Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.346530 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w"] Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.349720 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-bb6b49549-65g4w"] Jan 22 00:12:43 crc kubenswrapper[4829]: I0122 00:12:43.709842 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5"] Jan 22 00:12:43 crc kubenswrapper[4829]: W0122 00:12:43.717435 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4fe0441_5159_457b_b6c7_0ffd168435f2.slice/crio-eb1d77db8c22972d929c2b1c81a51fea47b2b89a454001dd93d72f9d8ae8ab5b WatchSource:0}: Error finding container eb1d77db8c22972d929c2b1c81a51fea47b2b89a454001dd93d72f9d8ae8ab5b: Status 404 returned error can't find the container with id eb1d77db8c22972d929c2b1c81a51fea47b2b89a454001dd93d72f9d8ae8ab5b Jan 22 00:12:44 crc kubenswrapper[4829]: I0122 00:12:44.288176 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5" event={"ID":"e4fe0441-5159-457b-b6c7-0ffd168435f2","Type":"ContainerStarted","Data":"9fa75420709da61d13ebf183e3f5c501a0fb3b2cdd87d05eaba5569e7a7bcf2b"} Jan 22 00:12:44 crc kubenswrapper[4829]: I0122 00:12:44.288619 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5" Jan 22 00:12:44 crc kubenswrapper[4829]: I0122 00:12:44.288644 4829 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5" event={"ID":"e4fe0441-5159-457b-b6c7-0ffd168435f2","Type":"ContainerStarted","Data":"eb1d77db8c22972d929c2b1c81a51fea47b2b89a454001dd93d72f9d8ae8ab5b"} Jan 22 00:12:44 crc kubenswrapper[4829]: I0122 00:12:44.315265 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5" podStartSLOduration=3.315247557 podStartE2EDuration="3.315247557s" podCreationTimestamp="2026-01-22 00:12:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:12:44.310185633 +0000 UTC m=+342.346427565" watchObservedRunningTime="2026-01-22 00:12:44.315247557 +0000 UTC m=+342.351489479" Jan 22 00:12:44 crc kubenswrapper[4829]: I0122 00:12:44.359802 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5" Jan 22 00:12:44 crc kubenswrapper[4829]: I0122 00:12:44.570597 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94cfa186-41f8-4320-a524-cacaee1fe270" path="/var/lib/kubelet/pods/94cfa186-41f8-4320-a524-cacaee1fe270/volumes" Jan 22 00:12:44 crc kubenswrapper[4829]: I0122 00:12:44.571210 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e69d04d0-ec93-4501-8d79-f19e886a50b2" path="/var/lib/kubelet/pods/e69d04d0-ec93-4501-8d79-f19e886a50b2/volumes" Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.089806 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-77d68bfdb-fjrng"] Jan 22 00:12:45 crc kubenswrapper[4829]: E0122 00:12:45.090116 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e69d04d0-ec93-4501-8d79-f19e886a50b2" containerName="controller-manager" Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.090140 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="e69d04d0-ec93-4501-8d79-f19e886a50b2" containerName="controller-manager" Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.090286 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="e69d04d0-ec93-4501-8d79-f19e886a50b2" containerName="controller-manager" Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.090784 4829 util.go:30] "No sandbox for pod can be found. 
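pod_startup_latency_tracker reports podStartSLOduration=3.315247557s for route-controller-manager-66d9b996-tcgh5. With both image-pull timestamps at the zero value (0001-01-01, i.e. no pull happened), the figure is simply watchObservedRunningTime minus podCreationTimestamp. The sketch below only reproduces that arithmetic from the timestamps printed in the entry above; the tracker's internal bookkeeping is not visible in this log.

package main

import (
	"fmt"
	"time"
)

func main() {
	// Timestamps copied from the pod_startup_latency_tracker entry above.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, err := time.Parse(layout, "2026-01-22 00:12:41 +0000 UTC")
	if err != nil {
		panic(err)
	}
	observed, err := time.Parse(layout, "2026-01-22 00:12:44.315247557 +0000 UTC")
	if err != nil {
		panic(err)
	}

	// With firstStartedPulling/lastFinishedPulling at the zero time (no image pull),
	// the reported SLO duration is observed-running minus pod creation.
	fmt.Println(observed.Sub(created).Seconds()) // prints 3.315247557
}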
Need to start a new one" pod="openshift-controller-manager/controller-manager-77d68bfdb-fjrng" Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.095751 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.096757 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.096822 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.096852 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.097134 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.100302 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-77d68bfdb-fjrng"] Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.102068 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.103845 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.272659 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93584ba9-f317-4437-ba69-2d872ac03a29-config\") pod \"controller-manager-77d68bfdb-fjrng\" (UID: \"93584ba9-f317-4437-ba69-2d872ac03a29\") " pod="openshift-controller-manager/controller-manager-77d68bfdb-fjrng" Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.272772 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/93584ba9-f317-4437-ba69-2d872ac03a29-client-ca\") pod \"controller-manager-77d68bfdb-fjrng\" (UID: \"93584ba9-f317-4437-ba69-2d872ac03a29\") " pod="openshift-controller-manager/controller-manager-77d68bfdb-fjrng" Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.272806 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/93584ba9-f317-4437-ba69-2d872ac03a29-serving-cert\") pod \"controller-manager-77d68bfdb-fjrng\" (UID: \"93584ba9-f317-4437-ba69-2d872ac03a29\") " pod="openshift-controller-manager/controller-manager-77d68bfdb-fjrng" Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.272914 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/93584ba9-f317-4437-ba69-2d872ac03a29-proxy-ca-bundles\") pod \"controller-manager-77d68bfdb-fjrng\" (UID: \"93584ba9-f317-4437-ba69-2d872ac03a29\") " pod="openshift-controller-manager/controller-manager-77d68bfdb-fjrng" Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.272950 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdfsz\" (UniqueName: 
\"kubernetes.io/projected/93584ba9-f317-4437-ba69-2d872ac03a29-kube-api-access-cdfsz\") pod \"controller-manager-77d68bfdb-fjrng\" (UID: \"93584ba9-f317-4437-ba69-2d872ac03a29\") " pod="openshift-controller-manager/controller-manager-77d68bfdb-fjrng" Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.374411 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/93584ba9-f317-4437-ba69-2d872ac03a29-serving-cert\") pod \"controller-manager-77d68bfdb-fjrng\" (UID: \"93584ba9-f317-4437-ba69-2d872ac03a29\") " pod="openshift-controller-manager/controller-manager-77d68bfdb-fjrng" Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.374519 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/93584ba9-f317-4437-ba69-2d872ac03a29-proxy-ca-bundles\") pod \"controller-manager-77d68bfdb-fjrng\" (UID: \"93584ba9-f317-4437-ba69-2d872ac03a29\") " pod="openshift-controller-manager/controller-manager-77d68bfdb-fjrng" Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.374559 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdfsz\" (UniqueName: \"kubernetes.io/projected/93584ba9-f317-4437-ba69-2d872ac03a29-kube-api-access-cdfsz\") pod \"controller-manager-77d68bfdb-fjrng\" (UID: \"93584ba9-f317-4437-ba69-2d872ac03a29\") " pod="openshift-controller-manager/controller-manager-77d68bfdb-fjrng" Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.374610 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93584ba9-f317-4437-ba69-2d872ac03a29-config\") pod \"controller-manager-77d68bfdb-fjrng\" (UID: \"93584ba9-f317-4437-ba69-2d872ac03a29\") " pod="openshift-controller-manager/controller-manager-77d68bfdb-fjrng" Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.374641 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/93584ba9-f317-4437-ba69-2d872ac03a29-client-ca\") pod \"controller-manager-77d68bfdb-fjrng\" (UID: \"93584ba9-f317-4437-ba69-2d872ac03a29\") " pod="openshift-controller-manager/controller-manager-77d68bfdb-fjrng" Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.376458 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/93584ba9-f317-4437-ba69-2d872ac03a29-client-ca\") pod \"controller-manager-77d68bfdb-fjrng\" (UID: \"93584ba9-f317-4437-ba69-2d872ac03a29\") " pod="openshift-controller-manager/controller-manager-77d68bfdb-fjrng" Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.376622 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/93584ba9-f317-4437-ba69-2d872ac03a29-proxy-ca-bundles\") pod \"controller-manager-77d68bfdb-fjrng\" (UID: \"93584ba9-f317-4437-ba69-2d872ac03a29\") " pod="openshift-controller-manager/controller-manager-77d68bfdb-fjrng" Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.377706 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93584ba9-f317-4437-ba69-2d872ac03a29-config\") pod \"controller-manager-77d68bfdb-fjrng\" (UID: \"93584ba9-f317-4437-ba69-2d872ac03a29\") " pod="openshift-controller-manager/controller-manager-77d68bfdb-fjrng" Jan 
22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.383459 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/93584ba9-f317-4437-ba69-2d872ac03a29-serving-cert\") pod \"controller-manager-77d68bfdb-fjrng\" (UID: \"93584ba9-f317-4437-ba69-2d872ac03a29\") " pod="openshift-controller-manager/controller-manager-77d68bfdb-fjrng" Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.396268 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdfsz\" (UniqueName: \"kubernetes.io/projected/93584ba9-f317-4437-ba69-2d872ac03a29-kube-api-access-cdfsz\") pod \"controller-manager-77d68bfdb-fjrng\" (UID: \"93584ba9-f317-4437-ba69-2d872ac03a29\") " pod="openshift-controller-manager/controller-manager-77d68bfdb-fjrng" Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.408851 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-77d68bfdb-fjrng" Jan 22 00:12:45 crc kubenswrapper[4829]: I0122 00:12:45.816751 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-77d68bfdb-fjrng"] Jan 22 00:12:46 crc kubenswrapper[4829]: I0122 00:12:46.309531 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-77d68bfdb-fjrng" event={"ID":"93584ba9-f317-4437-ba69-2d872ac03a29","Type":"ContainerStarted","Data":"38a46e8ca97424e795d8dc462cc59cbe350e3c91d12bbaeefa5fb411ec5e0c12"} Jan 22 00:12:46 crc kubenswrapper[4829]: I0122 00:12:46.309943 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-77d68bfdb-fjrng" event={"ID":"93584ba9-f317-4437-ba69-2d872ac03a29","Type":"ContainerStarted","Data":"1cf2e8d3f678e17b8149d49e729d2b3ea0c04730fa2ec75c084a6f186320d340"} Jan 22 00:12:46 crc kubenswrapper[4829]: I0122 00:12:46.335483 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-77d68bfdb-fjrng" podStartSLOduration=5.335464763 podStartE2EDuration="5.335464763s" podCreationTimestamp="2026-01-22 00:12:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:12:46.330158139 +0000 UTC m=+344.366400061" watchObservedRunningTime="2026-01-22 00:12:46.335464763 +0000 UTC m=+344.371706675" Jan 22 00:12:47 crc kubenswrapper[4829]: I0122 00:12:47.314471 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-77d68bfdb-fjrng" Jan 22 00:12:47 crc kubenswrapper[4829]: I0122 00:12:47.320271 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-77d68bfdb-fjrng" Jan 22 00:13:04 crc kubenswrapper[4829]: I0122 00:13:04.658640 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:13:04 crc kubenswrapper[4829]: I0122 00:13:04.659217 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" 
output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:13:21 crc kubenswrapper[4829]: I0122 00:13:21.612530 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-77d68bfdb-fjrng"] Jan 22 00:13:21 crc kubenswrapper[4829]: I0122 00:13:21.613310 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-77d68bfdb-fjrng" podUID="93584ba9-f317-4437-ba69-2d872ac03a29" containerName="controller-manager" containerID="cri-o://38a46e8ca97424e795d8dc462cc59cbe350e3c91d12bbaeefa5fb411ec5e0c12" gracePeriod=30 Jan 22 00:13:21 crc kubenswrapper[4829]: I0122 00:13:21.630376 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5"] Jan 22 00:13:21 crc kubenswrapper[4829]: I0122 00:13:21.630645 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5" podUID="e4fe0441-5159-457b-b6c7-0ffd168435f2" containerName="route-controller-manager" containerID="cri-o://9fa75420709da61d13ebf183e3f5c501a0fb3b2cdd87d05eaba5569e7a7bcf2b" gracePeriod=30 Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.528824 4829 generic.go:334] "Generic (PLEG): container finished" podID="e4fe0441-5159-457b-b6c7-0ffd168435f2" containerID="9fa75420709da61d13ebf183e3f5c501a0fb3b2cdd87d05eaba5569e7a7bcf2b" exitCode=0 Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.529077 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5" event={"ID":"e4fe0441-5159-457b-b6c7-0ffd168435f2","Type":"ContainerDied","Data":"9fa75420709da61d13ebf183e3f5c501a0fb3b2cdd87d05eaba5569e7a7bcf2b"} Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.531877 4829 generic.go:334] "Generic (PLEG): container finished" podID="93584ba9-f317-4437-ba69-2d872ac03a29" containerID="38a46e8ca97424e795d8dc462cc59cbe350e3c91d12bbaeefa5fb411ec5e0c12" exitCode=0 Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.531932 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-77d68bfdb-fjrng" event={"ID":"93584ba9-f317-4437-ba69-2d872ac03a29","Type":"ContainerDied","Data":"38a46e8ca97424e795d8dc462cc59cbe350e3c91d12bbaeefa5fb411ec5e0c12"} Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.664645 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-77d68bfdb-fjrng" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.674829 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.699448 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-79d8647b8b-pt22h"] Jan 22 00:13:22 crc kubenswrapper[4829]: E0122 00:13:22.699854 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4fe0441-5159-457b-b6c7-0ffd168435f2" containerName="route-controller-manager" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.699879 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4fe0441-5159-457b-b6c7-0ffd168435f2" containerName="route-controller-manager" Jan 22 00:13:22 crc kubenswrapper[4829]: E0122 00:13:22.699902 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93584ba9-f317-4437-ba69-2d872ac03a29" containerName="controller-manager" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.699913 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="93584ba9-f317-4437-ba69-2d872ac03a29" containerName="controller-manager" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.700091 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4fe0441-5159-457b-b6c7-0ffd168435f2" containerName="route-controller-manager" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.700113 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="93584ba9-f317-4437-ba69-2d872ac03a29" containerName="controller-manager" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.700866 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-79d8647b8b-pt22h" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.715860 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-79d8647b8b-pt22h"] Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.864016 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2qh79\" (UniqueName: \"kubernetes.io/projected/e4fe0441-5159-457b-b6c7-0ffd168435f2-kube-api-access-2qh79\") pod \"e4fe0441-5159-457b-b6c7-0ffd168435f2\" (UID: \"e4fe0441-5159-457b-b6c7-0ffd168435f2\") " Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.864096 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93584ba9-f317-4437-ba69-2d872ac03a29-config\") pod \"93584ba9-f317-4437-ba69-2d872ac03a29\" (UID: \"93584ba9-f317-4437-ba69-2d872ac03a29\") " Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.864131 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/93584ba9-f317-4437-ba69-2d872ac03a29-proxy-ca-bundles\") pod \"93584ba9-f317-4437-ba69-2d872ac03a29\" (UID: \"93584ba9-f317-4437-ba69-2d872ac03a29\") " Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.864183 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/93584ba9-f317-4437-ba69-2d872ac03a29-serving-cert\") pod \"93584ba9-f317-4437-ba69-2d872ac03a29\" (UID: \"93584ba9-f317-4437-ba69-2d872ac03a29\") " Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.864201 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/e4fe0441-5159-457b-b6c7-0ffd168435f2-config\") pod \"e4fe0441-5159-457b-b6c7-0ffd168435f2\" (UID: \"e4fe0441-5159-457b-b6c7-0ffd168435f2\") " Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.864231 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdfsz\" (UniqueName: \"kubernetes.io/projected/93584ba9-f317-4437-ba69-2d872ac03a29-kube-api-access-cdfsz\") pod \"93584ba9-f317-4437-ba69-2d872ac03a29\" (UID: \"93584ba9-f317-4437-ba69-2d872ac03a29\") " Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.864262 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e4fe0441-5159-457b-b6c7-0ffd168435f2-serving-cert\") pod \"e4fe0441-5159-457b-b6c7-0ffd168435f2\" (UID: \"e4fe0441-5159-457b-b6c7-0ffd168435f2\") " Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.864281 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e4fe0441-5159-457b-b6c7-0ffd168435f2-client-ca\") pod \"e4fe0441-5159-457b-b6c7-0ffd168435f2\" (UID: \"e4fe0441-5159-457b-b6c7-0ffd168435f2\") " Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.864303 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/93584ba9-f317-4437-ba69-2d872ac03a29-client-ca\") pod \"93584ba9-f317-4437-ba69-2d872ac03a29\" (UID: \"93584ba9-f317-4437-ba69-2d872ac03a29\") " Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.864419 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5338a7cf-f8a3-40ce-aa4b-ae878d98e08d-config\") pod \"controller-manager-79d8647b8b-pt22h\" (UID: \"5338a7cf-f8a3-40ce-aa4b-ae878d98e08d\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-pt22h" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.864521 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5338a7cf-f8a3-40ce-aa4b-ae878d98e08d-proxy-ca-bundles\") pod \"controller-manager-79d8647b8b-pt22h\" (UID: \"5338a7cf-f8a3-40ce-aa4b-ae878d98e08d\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-pt22h" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.864569 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5338a7cf-f8a3-40ce-aa4b-ae878d98e08d-serving-cert\") pod \"controller-manager-79d8647b8b-pt22h\" (UID: \"5338a7cf-f8a3-40ce-aa4b-ae878d98e08d\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-pt22h" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.864602 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5338a7cf-f8a3-40ce-aa4b-ae878d98e08d-client-ca\") pod \"controller-manager-79d8647b8b-pt22h\" (UID: \"5338a7cf-f8a3-40ce-aa4b-ae878d98e08d\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-pt22h" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.864653 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8dhm\" (UniqueName: 
\"kubernetes.io/projected/5338a7cf-f8a3-40ce-aa4b-ae878d98e08d-kube-api-access-t8dhm\") pod \"controller-manager-79d8647b8b-pt22h\" (UID: \"5338a7cf-f8a3-40ce-aa4b-ae878d98e08d\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-pt22h" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.865405 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4fe0441-5159-457b-b6c7-0ffd168435f2-config" (OuterVolumeSpecName: "config") pod "e4fe0441-5159-457b-b6c7-0ffd168435f2" (UID: "e4fe0441-5159-457b-b6c7-0ffd168435f2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.865438 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93584ba9-f317-4437-ba69-2d872ac03a29-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "93584ba9-f317-4437-ba69-2d872ac03a29" (UID: "93584ba9-f317-4437-ba69-2d872ac03a29"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.866143 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93584ba9-f317-4437-ba69-2d872ac03a29-config" (OuterVolumeSpecName: "config") pod "93584ba9-f317-4437-ba69-2d872ac03a29" (UID: "93584ba9-f317-4437-ba69-2d872ac03a29"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.866533 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93584ba9-f317-4437-ba69-2d872ac03a29-client-ca" (OuterVolumeSpecName: "client-ca") pod "93584ba9-f317-4437-ba69-2d872ac03a29" (UID: "93584ba9-f317-4437-ba69-2d872ac03a29"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.866707 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4fe0441-5159-457b-b6c7-0ffd168435f2-client-ca" (OuterVolumeSpecName: "client-ca") pod "e4fe0441-5159-457b-b6c7-0ffd168435f2" (UID: "e4fe0441-5159-457b-b6c7-0ffd168435f2"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.870850 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4fe0441-5159-457b-b6c7-0ffd168435f2-kube-api-access-2qh79" (OuterVolumeSpecName: "kube-api-access-2qh79") pod "e4fe0441-5159-457b-b6c7-0ffd168435f2" (UID: "e4fe0441-5159-457b-b6c7-0ffd168435f2"). InnerVolumeSpecName "kube-api-access-2qh79". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.871024 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4fe0441-5159-457b-b6c7-0ffd168435f2-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e4fe0441-5159-457b-b6c7-0ffd168435f2" (UID: "e4fe0441-5159-457b-b6c7-0ffd168435f2"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.877197 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93584ba9-f317-4437-ba69-2d872ac03a29-kube-api-access-cdfsz" (OuterVolumeSpecName: "kube-api-access-cdfsz") pod "93584ba9-f317-4437-ba69-2d872ac03a29" (UID: "93584ba9-f317-4437-ba69-2d872ac03a29"). InnerVolumeSpecName "kube-api-access-cdfsz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.881192 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93584ba9-f317-4437-ba69-2d872ac03a29-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "93584ba9-f317-4437-ba69-2d872ac03a29" (UID: "93584ba9-f317-4437-ba69-2d872ac03a29"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.966482 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5338a7cf-f8a3-40ce-aa4b-ae878d98e08d-proxy-ca-bundles\") pod \"controller-manager-79d8647b8b-pt22h\" (UID: \"5338a7cf-f8a3-40ce-aa4b-ae878d98e08d\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-pt22h" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.969801 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5338a7cf-f8a3-40ce-aa4b-ae878d98e08d-serving-cert\") pod \"controller-manager-79d8647b8b-pt22h\" (UID: \"5338a7cf-f8a3-40ce-aa4b-ae878d98e08d\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-pt22h" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.970079 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5338a7cf-f8a3-40ce-aa4b-ae878d98e08d-client-ca\") pod \"controller-manager-79d8647b8b-pt22h\" (UID: \"5338a7cf-f8a3-40ce-aa4b-ae878d98e08d\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-pt22h" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.970197 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8dhm\" (UniqueName: \"kubernetes.io/projected/5338a7cf-f8a3-40ce-aa4b-ae878d98e08d-kube-api-access-t8dhm\") pod \"controller-manager-79d8647b8b-pt22h\" (UID: \"5338a7cf-f8a3-40ce-aa4b-ae878d98e08d\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-pt22h" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.970300 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5338a7cf-f8a3-40ce-aa4b-ae878d98e08d-config\") pod \"controller-manager-79d8647b8b-pt22h\" (UID: \"5338a7cf-f8a3-40ce-aa4b-ae878d98e08d\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-pt22h" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.969679 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5338a7cf-f8a3-40ce-aa4b-ae878d98e08d-proxy-ca-bundles\") pod \"controller-manager-79d8647b8b-pt22h\" (UID: \"5338a7cf-f8a3-40ce-aa4b-ae878d98e08d\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-pt22h" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.971650 4829 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5338a7cf-f8a3-40ce-aa4b-ae878d98e08d-client-ca\") pod \"controller-manager-79d8647b8b-pt22h\" (UID: \"5338a7cf-f8a3-40ce-aa4b-ae878d98e08d\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-pt22h" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.972358 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2qh79\" (UniqueName: \"kubernetes.io/projected/e4fe0441-5159-457b-b6c7-0ffd168435f2-kube-api-access-2qh79\") on node \"crc\" DevicePath \"\"" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.972397 4829 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93584ba9-f317-4437-ba69-2d872ac03a29-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.972412 4829 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/93584ba9-f317-4437-ba69-2d872ac03a29-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.972426 4829 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/93584ba9-f317-4437-ba69-2d872ac03a29-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.972441 4829 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4fe0441-5159-457b-b6c7-0ffd168435f2-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.972454 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdfsz\" (UniqueName: \"kubernetes.io/projected/93584ba9-f317-4437-ba69-2d872ac03a29-kube-api-access-cdfsz\") on node \"crc\" DevicePath \"\"" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.972467 4829 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e4fe0441-5159-457b-b6c7-0ffd168435f2-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.972479 4829 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e4fe0441-5159-457b-b6c7-0ffd168435f2-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.972489 4829 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/93584ba9-f317-4437-ba69-2d872ac03a29-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.973038 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5338a7cf-f8a3-40ce-aa4b-ae878d98e08d-config\") pod \"controller-manager-79d8647b8b-pt22h\" (UID: \"5338a7cf-f8a3-40ce-aa4b-ae878d98e08d\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-pt22h" Jan 22 00:13:22 crc kubenswrapper[4829]: I0122 00:13:22.976320 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5338a7cf-f8a3-40ce-aa4b-ae878d98e08d-serving-cert\") pod \"controller-manager-79d8647b8b-pt22h\" (UID: \"5338a7cf-f8a3-40ce-aa4b-ae878d98e08d\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-pt22h" Jan 22 00:13:23 crc kubenswrapper[4829]: I0122 00:13:23.008688 
4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8dhm\" (UniqueName: \"kubernetes.io/projected/5338a7cf-f8a3-40ce-aa4b-ae878d98e08d-kube-api-access-t8dhm\") pod \"controller-manager-79d8647b8b-pt22h\" (UID: \"5338a7cf-f8a3-40ce-aa4b-ae878d98e08d\") " pod="openshift-controller-manager/controller-manager-79d8647b8b-pt22h" Jan 22 00:13:23 crc kubenswrapper[4829]: I0122 00:13:23.014054 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-79d8647b8b-pt22h" Jan 22 00:13:23 crc kubenswrapper[4829]: I0122 00:13:23.231164 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-79d8647b8b-pt22h"] Jan 22 00:13:23 crc kubenswrapper[4829]: I0122 00:13:23.539172 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-77d68bfdb-fjrng" event={"ID":"93584ba9-f317-4437-ba69-2d872ac03a29","Type":"ContainerDied","Data":"1cf2e8d3f678e17b8149d49e729d2b3ea0c04730fa2ec75c084a6f186320d340"} Jan 22 00:13:23 crc kubenswrapper[4829]: I0122 00:13:23.539450 4829 scope.go:117] "RemoveContainer" containerID="38a46e8ca97424e795d8dc462cc59cbe350e3c91d12bbaeefa5fb411ec5e0c12" Jan 22 00:13:23 crc kubenswrapper[4829]: I0122 00:13:23.539240 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-77d68bfdb-fjrng" Jan 22 00:13:23 crc kubenswrapper[4829]: I0122 00:13:23.541123 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5" event={"ID":"e4fe0441-5159-457b-b6c7-0ffd168435f2","Type":"ContainerDied","Data":"eb1d77db8c22972d929c2b1c81a51fea47b2b89a454001dd93d72f9d8ae8ab5b"} Jan 22 00:13:23 crc kubenswrapper[4829]: I0122 00:13:23.541143 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5" Jan 22 00:13:23 crc kubenswrapper[4829]: I0122 00:13:23.542937 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-79d8647b8b-pt22h" event={"ID":"5338a7cf-f8a3-40ce-aa4b-ae878d98e08d","Type":"ContainerStarted","Data":"42127e64d278acd7121bcd084425610aff9b68fa272b3dab4870ddcfbf2bdf22"} Jan 22 00:13:23 crc kubenswrapper[4829]: I0122 00:13:23.542973 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-79d8647b8b-pt22h" event={"ID":"5338a7cf-f8a3-40ce-aa4b-ae878d98e08d","Type":"ContainerStarted","Data":"7de0de98971871d6b1f3601a21d4193db9d7ecafb68495ec711e9de549706c2d"} Jan 22 00:13:23 crc kubenswrapper[4829]: I0122 00:13:23.544000 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-79d8647b8b-pt22h" Jan 22 00:13:23 crc kubenswrapper[4829]: I0122 00:13:23.558285 4829 scope.go:117] "RemoveContainer" containerID="9fa75420709da61d13ebf183e3f5c501a0fb3b2cdd87d05eaba5569e7a7bcf2b" Jan 22 00:13:23 crc kubenswrapper[4829]: I0122 00:13:23.558363 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-79d8647b8b-pt22h" Jan 22 00:13:23 crc kubenswrapper[4829]: I0122 00:13:23.583459 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-79d8647b8b-pt22h" podStartSLOduration=2.583440289 podStartE2EDuration="2.583440289s" podCreationTimestamp="2026-01-22 00:13:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:13:23.579495794 +0000 UTC m=+381.615737706" watchObservedRunningTime="2026-01-22 00:13:23.583440289 +0000 UTC m=+381.619682201" Jan 22 00:13:23 crc kubenswrapper[4829]: I0122 00:13:23.604999 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-77d68bfdb-fjrng"] Jan 22 00:13:23 crc kubenswrapper[4829]: I0122 00:13:23.610659 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-77d68bfdb-fjrng"] Jan 22 00:13:23 crc kubenswrapper[4829]: I0122 00:13:23.641211 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5"] Jan 22 00:13:23 crc kubenswrapper[4829]: I0122 00:13:23.644238 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-66d9b996-tcgh5"] Jan 22 00:13:24 crc kubenswrapper[4829]: I0122 00:13:24.162557 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-g7zhb"] Jan 22 00:13:24 crc kubenswrapper[4829]: I0122 00:13:24.163454 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-g7zhb" Jan 22 00:13:24 crc kubenswrapper[4829]: I0122 00:13:24.165698 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 22 00:13:24 crc kubenswrapper[4829]: I0122 00:13:24.182453 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g7zhb"] Jan 22 00:13:24 crc kubenswrapper[4829]: I0122 00:13:24.188998 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zx4mr\" (UniqueName: \"kubernetes.io/projected/e0b65e9f-05eb-47f2-ba46-cb2de6aa0888-kube-api-access-zx4mr\") pod \"certified-operators-g7zhb\" (UID: \"e0b65e9f-05eb-47f2-ba46-cb2de6aa0888\") " pod="openshift-marketplace/certified-operators-g7zhb" Jan 22 00:13:24 crc kubenswrapper[4829]: I0122 00:13:24.189057 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0b65e9f-05eb-47f2-ba46-cb2de6aa0888-catalog-content\") pod \"certified-operators-g7zhb\" (UID: \"e0b65e9f-05eb-47f2-ba46-cb2de6aa0888\") " pod="openshift-marketplace/certified-operators-g7zhb" Jan 22 00:13:24 crc kubenswrapper[4829]: I0122 00:13:24.189261 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0b65e9f-05eb-47f2-ba46-cb2de6aa0888-utilities\") pod \"certified-operators-g7zhb\" (UID: \"e0b65e9f-05eb-47f2-ba46-cb2de6aa0888\") " pod="openshift-marketplace/certified-operators-g7zhb" Jan 22 00:13:24 crc kubenswrapper[4829]: I0122 00:13:24.290286 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zx4mr\" (UniqueName: \"kubernetes.io/projected/e0b65e9f-05eb-47f2-ba46-cb2de6aa0888-kube-api-access-zx4mr\") pod \"certified-operators-g7zhb\" (UID: \"e0b65e9f-05eb-47f2-ba46-cb2de6aa0888\") " pod="openshift-marketplace/certified-operators-g7zhb" Jan 22 00:13:24 crc kubenswrapper[4829]: I0122 00:13:24.290339 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0b65e9f-05eb-47f2-ba46-cb2de6aa0888-catalog-content\") pod \"certified-operators-g7zhb\" (UID: \"e0b65e9f-05eb-47f2-ba46-cb2de6aa0888\") " pod="openshift-marketplace/certified-operators-g7zhb" Jan 22 00:13:24 crc kubenswrapper[4829]: I0122 00:13:24.290398 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0b65e9f-05eb-47f2-ba46-cb2de6aa0888-utilities\") pod \"certified-operators-g7zhb\" (UID: \"e0b65e9f-05eb-47f2-ba46-cb2de6aa0888\") " pod="openshift-marketplace/certified-operators-g7zhb" Jan 22 00:13:24 crc kubenswrapper[4829]: I0122 00:13:24.291239 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e0b65e9f-05eb-47f2-ba46-cb2de6aa0888-utilities\") pod \"certified-operators-g7zhb\" (UID: \"e0b65e9f-05eb-47f2-ba46-cb2de6aa0888\") " pod="openshift-marketplace/certified-operators-g7zhb" Jan 22 00:13:24 crc kubenswrapper[4829]: I0122 00:13:24.291345 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e0b65e9f-05eb-47f2-ba46-cb2de6aa0888-catalog-content\") pod \"certified-operators-g7zhb\" (UID: 
\"e0b65e9f-05eb-47f2-ba46-cb2de6aa0888\") " pod="openshift-marketplace/certified-operators-g7zhb" Jan 22 00:13:24 crc kubenswrapper[4829]: I0122 00:13:24.313578 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zx4mr\" (UniqueName: \"kubernetes.io/projected/e0b65e9f-05eb-47f2-ba46-cb2de6aa0888-kube-api-access-zx4mr\") pod \"certified-operators-g7zhb\" (UID: \"e0b65e9f-05eb-47f2-ba46-cb2de6aa0888\") " pod="openshift-marketplace/certified-operators-g7zhb" Jan 22 00:13:24 crc kubenswrapper[4829]: I0122 00:13:24.485786 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g7zhb" Jan 22 00:13:24 crc kubenswrapper[4829]: I0122 00:13:24.561106 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="93584ba9-f317-4437-ba69-2d872ac03a29" path="/var/lib/kubelet/pods/93584ba9-f317-4437-ba69-2d872ac03a29/volumes" Jan 22 00:13:24 crc kubenswrapper[4829]: I0122 00:13:24.561921 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4fe0441-5159-457b-b6c7-0ffd168435f2" path="/var/lib/kubelet/pods/e4fe0441-5159-457b-b6c7-0ffd168435f2/volumes" Jan 22 00:13:24 crc kubenswrapper[4829]: I0122 00:13:24.969190 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g7zhb"] Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.109080 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-bb6b49549-r6q4q"] Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.110144 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-r6q4q" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.114062 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.114200 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.114270 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.115916 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.116120 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.116319 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.127196 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-bb6b49549-r6q4q"] Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.206135 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6dd07f6-8dcb-4d1a-9ad2-6b4d75af3131-config\") pod \"route-controller-manager-bb6b49549-r6q4q\" (UID: \"b6dd07f6-8dcb-4d1a-9ad2-6b4d75af3131\") " 
pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-r6q4q" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.207096 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b6dd07f6-8dcb-4d1a-9ad2-6b4d75af3131-serving-cert\") pod \"route-controller-manager-bb6b49549-r6q4q\" (UID: \"b6dd07f6-8dcb-4d1a-9ad2-6b4d75af3131\") " pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-r6q4q" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.207135 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b6dd07f6-8dcb-4d1a-9ad2-6b4d75af3131-client-ca\") pod \"route-controller-manager-bb6b49549-r6q4q\" (UID: \"b6dd07f6-8dcb-4d1a-9ad2-6b4d75af3131\") " pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-r6q4q" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.308589 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6dd07f6-8dcb-4d1a-9ad2-6b4d75af3131-config\") pod \"route-controller-manager-bb6b49549-r6q4q\" (UID: \"b6dd07f6-8dcb-4d1a-9ad2-6b4d75af3131\") " pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-r6q4q" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.308691 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b6dd07f6-8dcb-4d1a-9ad2-6b4d75af3131-serving-cert\") pod \"route-controller-manager-bb6b49549-r6q4q\" (UID: \"b6dd07f6-8dcb-4d1a-9ad2-6b4d75af3131\") " pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-r6q4q" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.308747 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hvdw\" (UniqueName: \"kubernetes.io/projected/b6dd07f6-8dcb-4d1a-9ad2-6b4d75af3131-kube-api-access-6hvdw\") pod \"route-controller-manager-bb6b49549-r6q4q\" (UID: \"b6dd07f6-8dcb-4d1a-9ad2-6b4d75af3131\") " pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-r6q4q" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.308793 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b6dd07f6-8dcb-4d1a-9ad2-6b4d75af3131-client-ca\") pod \"route-controller-manager-bb6b49549-r6q4q\" (UID: \"b6dd07f6-8dcb-4d1a-9ad2-6b4d75af3131\") " pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-r6q4q" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.309803 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b6dd07f6-8dcb-4d1a-9ad2-6b4d75af3131-client-ca\") pod \"route-controller-manager-bb6b49549-r6q4q\" (UID: \"b6dd07f6-8dcb-4d1a-9ad2-6b4d75af3131\") " pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-r6q4q" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.309868 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b6dd07f6-8dcb-4d1a-9ad2-6b4d75af3131-config\") pod \"route-controller-manager-bb6b49549-r6q4q\" (UID: \"b6dd07f6-8dcb-4d1a-9ad2-6b4d75af3131\") " 
pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-r6q4q" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.315029 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b6dd07f6-8dcb-4d1a-9ad2-6b4d75af3131-serving-cert\") pod \"route-controller-manager-bb6b49549-r6q4q\" (UID: \"b6dd07f6-8dcb-4d1a-9ad2-6b4d75af3131\") " pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-r6q4q" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.410372 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hvdw\" (UniqueName: \"kubernetes.io/projected/b6dd07f6-8dcb-4d1a-9ad2-6b4d75af3131-kube-api-access-6hvdw\") pod \"route-controller-manager-bb6b49549-r6q4q\" (UID: \"b6dd07f6-8dcb-4d1a-9ad2-6b4d75af3131\") " pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-r6q4q" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.431620 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hvdw\" (UniqueName: \"kubernetes.io/projected/b6dd07f6-8dcb-4d1a-9ad2-6b4d75af3131-kube-api-access-6hvdw\") pod \"route-controller-manager-bb6b49549-r6q4q\" (UID: \"b6dd07f6-8dcb-4d1a-9ad2-6b4d75af3131\") " pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-r6q4q" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.495587 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-r6q4q" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.557934 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-k4khp"] Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.559819 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-k4khp" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.561628 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.568886 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-k4khp"] Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.579845 4829 generic.go:334] "Generic (PLEG): container finished" podID="e0b65e9f-05eb-47f2-ba46-cb2de6aa0888" containerID="1fd44bcd0208d5257705696b2769853ed6d5d9f1705b1d3c0946b0e432ff9e2c" exitCode=0 Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.579908 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g7zhb" event={"ID":"e0b65e9f-05eb-47f2-ba46-cb2de6aa0888","Type":"ContainerDied","Data":"1fd44bcd0208d5257705696b2769853ed6d5d9f1705b1d3c0946b0e432ff9e2c"} Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.579959 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g7zhb" event={"ID":"e0b65e9f-05eb-47f2-ba46-cb2de6aa0888","Type":"ContainerStarted","Data":"7691c2ba8b4e5229bc4e4c2eb35f6f3f6065adf1f2c178d2a570eb8e49c22e31"} Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.713171 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2mzd\" (UniqueName: \"kubernetes.io/projected/562c006a-af63-46fc-91b8-fcab2322a59d-kube-api-access-c2mzd\") pod \"community-operators-k4khp\" (UID: \"562c006a-af63-46fc-91b8-fcab2322a59d\") " pod="openshift-marketplace/community-operators-k4khp" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.713357 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/562c006a-af63-46fc-91b8-fcab2322a59d-catalog-content\") pod \"community-operators-k4khp\" (UID: \"562c006a-af63-46fc-91b8-fcab2322a59d\") " pod="openshift-marketplace/community-operators-k4khp" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.713409 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/562c006a-af63-46fc-91b8-fcab2322a59d-utilities\") pod \"community-operators-k4khp\" (UID: \"562c006a-af63-46fc-91b8-fcab2322a59d\") " pod="openshift-marketplace/community-operators-k4khp" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.814529 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/562c006a-af63-46fc-91b8-fcab2322a59d-utilities\") pod \"community-operators-k4khp\" (UID: \"562c006a-af63-46fc-91b8-fcab2322a59d\") " pod="openshift-marketplace/community-operators-k4khp" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.814626 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2mzd\" (UniqueName: \"kubernetes.io/projected/562c006a-af63-46fc-91b8-fcab2322a59d-kube-api-access-c2mzd\") pod \"community-operators-k4khp\" (UID: \"562c006a-af63-46fc-91b8-fcab2322a59d\") " pod="openshift-marketplace/community-operators-k4khp" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.814670 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/562c006a-af63-46fc-91b8-fcab2322a59d-catalog-content\") pod \"community-operators-k4khp\" (UID: \"562c006a-af63-46fc-91b8-fcab2322a59d\") " pod="openshift-marketplace/community-operators-k4khp" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.815129 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/562c006a-af63-46fc-91b8-fcab2322a59d-catalog-content\") pod \"community-operators-k4khp\" (UID: \"562c006a-af63-46fc-91b8-fcab2322a59d\") " pod="openshift-marketplace/community-operators-k4khp" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.815370 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/562c006a-af63-46fc-91b8-fcab2322a59d-utilities\") pod \"community-operators-k4khp\" (UID: \"562c006a-af63-46fc-91b8-fcab2322a59d\") " pod="openshift-marketplace/community-operators-k4khp" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.871472 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2mzd\" (UniqueName: \"kubernetes.io/projected/562c006a-af63-46fc-91b8-fcab2322a59d-kube-api-access-c2mzd\") pod \"community-operators-k4khp\" (UID: \"562c006a-af63-46fc-91b8-fcab2322a59d\") " pod="openshift-marketplace/community-operators-k4khp" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.884275 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k4khp" Jan 22 00:13:25 crc kubenswrapper[4829]: I0122 00:13:25.946576 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-bb6b49549-r6q4q"] Jan 22 00:13:25 crc kubenswrapper[4829]: W0122 00:13:25.955779 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb6dd07f6_8dcb_4d1a_9ad2_6b4d75af3131.slice/crio-c0f99b7523208257ea7bab2b47a278d14e8ac9b76827d3a1fcb5aaacd70f95b6 WatchSource:0}: Error finding container c0f99b7523208257ea7bab2b47a278d14e8ac9b76827d3a1fcb5aaacd70f95b6: Status 404 returned error can't find the container with id c0f99b7523208257ea7bab2b47a278d14e8ac9b76827d3a1fcb5aaacd70f95b6 Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:26.324424 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-k4khp"] Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:26.563932 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-twbvl"] Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:26.566839 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-twbvl" Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:26.572764 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:26.577362 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-twbvl"] Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:26.599057 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-r6q4q" event={"ID":"b6dd07f6-8dcb-4d1a-9ad2-6b4d75af3131","Type":"ContainerStarted","Data":"aa22b89244dd23ab02c04be961e27254575005db4af720f8f635d8a2f0d38866"} Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:26.599117 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-r6q4q" event={"ID":"b6dd07f6-8dcb-4d1a-9ad2-6b4d75af3131","Type":"ContainerStarted","Data":"c0f99b7523208257ea7bab2b47a278d14e8ac9b76827d3a1fcb5aaacd70f95b6"} Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:26.600365 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-r6q4q" Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:26.604701 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g7zhb" event={"ID":"e0b65e9f-05eb-47f2-ba46-cb2de6aa0888","Type":"ContainerStarted","Data":"da18b59140e9eb0c9d0eb7f92e6fb810d5ad5ec4af6d5898a2b3c33a4eadb640"} Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:26.609643 4829 generic.go:334] "Generic (PLEG): container finished" podID="562c006a-af63-46fc-91b8-fcab2322a59d" containerID="c65f8b56835c091e1e1085f1d723b65467e66453e2502cd0c979c10318f451de" exitCode=0 Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:26.609675 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k4khp" event={"ID":"562c006a-af63-46fc-91b8-fcab2322a59d","Type":"ContainerDied","Data":"c65f8b56835c091e1e1085f1d723b65467e66453e2502cd0c979c10318f451de"} Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:26.609693 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k4khp" event={"ID":"562c006a-af63-46fc-91b8-fcab2322a59d","Type":"ContainerStarted","Data":"780a7caf339ba4e3615c20f8499c6be24e9744eaca61f2d6d89156dc2a116e14"} Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:26.618473 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-r6q4q" Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:26.631049 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-bb6b49549-r6q4q" podStartSLOduration=5.6310249 podStartE2EDuration="5.6310249s" podCreationTimestamp="2026-01-22 00:13:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:13:26.628818474 +0000 UTC m=+384.665060396" watchObservedRunningTime="2026-01-22 00:13:26.6310249 +0000 UTC m=+384.667266812" Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:26.631742 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d464f7a3-9a5c-4666-90a5-26ab2bf827b1-catalog-content\") pod \"redhat-operators-twbvl\" (UID: \"d464f7a3-9a5c-4666-90a5-26ab2bf827b1\") " pod="openshift-marketplace/redhat-operators-twbvl" Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:26.631801 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d464f7a3-9a5c-4666-90a5-26ab2bf827b1-utilities\") pod \"redhat-operators-twbvl\" (UID: \"d464f7a3-9a5c-4666-90a5-26ab2bf827b1\") " pod="openshift-marketplace/redhat-operators-twbvl" Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:26.631914 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbbgt\" (UniqueName: \"kubernetes.io/projected/d464f7a3-9a5c-4666-90a5-26ab2bf827b1-kube-api-access-xbbgt\") pod \"redhat-operators-twbvl\" (UID: \"d464f7a3-9a5c-4666-90a5-26ab2bf827b1\") " pod="openshift-marketplace/redhat-operators-twbvl" Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:26.733564 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbbgt\" (UniqueName: \"kubernetes.io/projected/d464f7a3-9a5c-4666-90a5-26ab2bf827b1-kube-api-access-xbbgt\") pod \"redhat-operators-twbvl\" (UID: \"d464f7a3-9a5c-4666-90a5-26ab2bf827b1\") " pod="openshift-marketplace/redhat-operators-twbvl" Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:26.733635 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d464f7a3-9a5c-4666-90a5-26ab2bf827b1-catalog-content\") pod \"redhat-operators-twbvl\" (UID: \"d464f7a3-9a5c-4666-90a5-26ab2bf827b1\") " pod="openshift-marketplace/redhat-operators-twbvl" Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:26.733662 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d464f7a3-9a5c-4666-90a5-26ab2bf827b1-utilities\") pod \"redhat-operators-twbvl\" (UID: \"d464f7a3-9a5c-4666-90a5-26ab2bf827b1\") " pod="openshift-marketplace/redhat-operators-twbvl" Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:26.734864 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d464f7a3-9a5c-4666-90a5-26ab2bf827b1-utilities\") pod \"redhat-operators-twbvl\" (UID: \"d464f7a3-9a5c-4666-90a5-26ab2bf827b1\") " pod="openshift-marketplace/redhat-operators-twbvl" Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:26.734873 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d464f7a3-9a5c-4666-90a5-26ab2bf827b1-catalog-content\") pod \"redhat-operators-twbvl\" (UID: \"d464f7a3-9a5c-4666-90a5-26ab2bf827b1\") " pod="openshift-marketplace/redhat-operators-twbvl" Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:26.764036 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbbgt\" (UniqueName: \"kubernetes.io/projected/d464f7a3-9a5c-4666-90a5-26ab2bf827b1-kube-api-access-xbbgt\") pod \"redhat-operators-twbvl\" (UID: \"d464f7a3-9a5c-4666-90a5-26ab2bf827b1\") " pod="openshift-marketplace/redhat-operators-twbvl" Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:26.900905 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-twbvl" Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:27.365170 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-twbvl"] Jan 22 00:13:27 crc kubenswrapper[4829]: W0122 00:13:27.376715 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd464f7a3_9a5c_4666_90a5_26ab2bf827b1.slice/crio-92a80b6a985e08923657f7aca446a58c6dadb6c0d8a6c480bba8e82e56a90839 WatchSource:0}: Error finding container 92a80b6a985e08923657f7aca446a58c6dadb6c0d8a6c480bba8e82e56a90839: Status 404 returned error can't find the container with id 92a80b6a985e08923657f7aca446a58c6dadb6c0d8a6c480bba8e82e56a90839 Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:27.617021 4829 generic.go:334] "Generic (PLEG): container finished" podID="e0b65e9f-05eb-47f2-ba46-cb2de6aa0888" containerID="da18b59140e9eb0c9d0eb7f92e6fb810d5ad5ec4af6d5898a2b3c33a4eadb640" exitCode=0 Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:27.617130 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g7zhb" event={"ID":"e0b65e9f-05eb-47f2-ba46-cb2de6aa0888","Type":"ContainerDied","Data":"da18b59140e9eb0c9d0eb7f92e6fb810d5ad5ec4af6d5898a2b3c33a4eadb640"} Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:27.618447 4829 generic.go:334] "Generic (PLEG): container finished" podID="d464f7a3-9a5c-4666-90a5-26ab2bf827b1" containerID="dbf5b7d5384c9d4d0d8a33778a75d1ad4ecc3d4390e0ad55e148922c48e4635c" exitCode=0 Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:27.618493 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-twbvl" event={"ID":"d464f7a3-9a5c-4666-90a5-26ab2bf827b1","Type":"ContainerDied","Data":"dbf5b7d5384c9d4d0d8a33778a75d1ad4ecc3d4390e0ad55e148922c48e4635c"} Jan 22 00:13:27 crc kubenswrapper[4829]: I0122 00:13:27.618552 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-twbvl" event={"ID":"d464f7a3-9a5c-4666-90a5-26ab2bf827b1","Type":"ContainerStarted","Data":"92a80b6a985e08923657f7aca446a58c6dadb6c0d8a6c480bba8e82e56a90839"} Jan 22 00:13:28 crc kubenswrapper[4829]: I0122 00:13:28.364167 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ngtfk"] Jan 22 00:13:28 crc kubenswrapper[4829]: I0122 00:13:28.365934 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ngtfk" Jan 22 00:13:28 crc kubenswrapper[4829]: I0122 00:13:28.368522 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 22 00:13:28 crc kubenswrapper[4829]: I0122 00:13:28.384740 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ngtfk"] Jan 22 00:13:28 crc kubenswrapper[4829]: I0122 00:13:28.460890 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ef73aa9-86c5-452b-af65-e8b14090b016-utilities\") pod \"redhat-marketplace-ngtfk\" (UID: \"4ef73aa9-86c5-452b-af65-e8b14090b016\") " pod="openshift-marketplace/redhat-marketplace-ngtfk" Jan 22 00:13:28 crc kubenswrapper[4829]: I0122 00:13:28.461005 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ef73aa9-86c5-452b-af65-e8b14090b016-catalog-content\") pod \"redhat-marketplace-ngtfk\" (UID: \"4ef73aa9-86c5-452b-af65-e8b14090b016\") " pod="openshift-marketplace/redhat-marketplace-ngtfk" Jan 22 00:13:28 crc kubenswrapper[4829]: I0122 00:13:28.461052 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sb9ll\" (UniqueName: \"kubernetes.io/projected/4ef73aa9-86c5-452b-af65-e8b14090b016-kube-api-access-sb9ll\") pod \"redhat-marketplace-ngtfk\" (UID: \"4ef73aa9-86c5-452b-af65-e8b14090b016\") " pod="openshift-marketplace/redhat-marketplace-ngtfk" Jan 22 00:13:28 crc kubenswrapper[4829]: I0122 00:13:28.562470 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ef73aa9-86c5-452b-af65-e8b14090b016-utilities\") pod \"redhat-marketplace-ngtfk\" (UID: \"4ef73aa9-86c5-452b-af65-e8b14090b016\") " pod="openshift-marketplace/redhat-marketplace-ngtfk" Jan 22 00:13:28 crc kubenswrapper[4829]: I0122 00:13:28.562931 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ef73aa9-86c5-452b-af65-e8b14090b016-catalog-content\") pod \"redhat-marketplace-ngtfk\" (UID: \"4ef73aa9-86c5-452b-af65-e8b14090b016\") " pod="openshift-marketplace/redhat-marketplace-ngtfk" Jan 22 00:13:28 crc kubenswrapper[4829]: I0122 00:13:28.562973 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sb9ll\" (UniqueName: \"kubernetes.io/projected/4ef73aa9-86c5-452b-af65-e8b14090b016-kube-api-access-sb9ll\") pod \"redhat-marketplace-ngtfk\" (UID: \"4ef73aa9-86c5-452b-af65-e8b14090b016\") " pod="openshift-marketplace/redhat-marketplace-ngtfk" Jan 22 00:13:28 crc kubenswrapper[4829]: I0122 00:13:28.563240 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ef73aa9-86c5-452b-af65-e8b14090b016-utilities\") pod \"redhat-marketplace-ngtfk\" (UID: \"4ef73aa9-86c5-452b-af65-e8b14090b016\") " pod="openshift-marketplace/redhat-marketplace-ngtfk" Jan 22 00:13:28 crc kubenswrapper[4829]: I0122 00:13:28.563510 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ef73aa9-86c5-452b-af65-e8b14090b016-catalog-content\") pod \"redhat-marketplace-ngtfk\" (UID: 
\"4ef73aa9-86c5-452b-af65-e8b14090b016\") " pod="openshift-marketplace/redhat-marketplace-ngtfk" Jan 22 00:13:28 crc kubenswrapper[4829]: I0122 00:13:28.591507 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sb9ll\" (UniqueName: \"kubernetes.io/projected/4ef73aa9-86c5-452b-af65-e8b14090b016-kube-api-access-sb9ll\") pod \"redhat-marketplace-ngtfk\" (UID: \"4ef73aa9-86c5-452b-af65-e8b14090b016\") " pod="openshift-marketplace/redhat-marketplace-ngtfk" Jan 22 00:13:28 crc kubenswrapper[4829]: I0122 00:13:28.628452 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g7zhb" event={"ID":"e0b65e9f-05eb-47f2-ba46-cb2de6aa0888","Type":"ContainerStarted","Data":"1b12e959cde1ec79d4fae846ca3eea73f7607d32c84ffc4010dd16db9263e4e7"} Jan 22 00:13:28 crc kubenswrapper[4829]: I0122 00:13:28.632266 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-twbvl" event={"ID":"d464f7a3-9a5c-4666-90a5-26ab2bf827b1","Type":"ContainerStarted","Data":"a39f6c63ccac44ef015123d88a566f1effdf623a3f1b8a0bf09466d6f0327e3e"} Jan 22 00:13:28 crc kubenswrapper[4829]: I0122 00:13:28.634824 4829 generic.go:334] "Generic (PLEG): container finished" podID="562c006a-af63-46fc-91b8-fcab2322a59d" containerID="6d839ffa97db26a0cad303e506d002d4ba665cd741114b5d43e42a0931f3dc0e" exitCode=0 Jan 22 00:13:28 crc kubenswrapper[4829]: I0122 00:13:28.634940 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k4khp" event={"ID":"562c006a-af63-46fc-91b8-fcab2322a59d","Type":"ContainerDied","Data":"6d839ffa97db26a0cad303e506d002d4ba665cd741114b5d43e42a0931f3dc0e"} Jan 22 00:13:28 crc kubenswrapper[4829]: I0122 00:13:28.655187 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-g7zhb" podStartSLOduration=2.148950611 podStartE2EDuration="4.655163651s" podCreationTimestamp="2026-01-22 00:13:24 +0000 UTC" firstStartedPulling="2026-01-22 00:13:25.582063338 +0000 UTC m=+383.618305250" lastFinishedPulling="2026-01-22 00:13:28.088276378 +0000 UTC m=+386.124518290" observedRunningTime="2026-01-22 00:13:28.649338419 +0000 UTC m=+386.685580351" watchObservedRunningTime="2026-01-22 00:13:28.655163651 +0000 UTC m=+386.691405563" Jan 22 00:13:28 crc kubenswrapper[4829]: I0122 00:13:28.717029 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ngtfk" Jan 22 00:13:29 crc kubenswrapper[4829]: I0122 00:13:29.202819 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ngtfk"] Jan 22 00:13:29 crc kubenswrapper[4829]: I0122 00:13:29.644512 4829 generic.go:334] "Generic (PLEG): container finished" podID="4ef73aa9-86c5-452b-af65-e8b14090b016" containerID="7ac75d528a25ad188768f98e7dac3bf07e4793154fbb71e0e1c484b04d01ca22" exitCode=0 Jan 22 00:13:29 crc kubenswrapper[4829]: I0122 00:13:29.644582 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ngtfk" event={"ID":"4ef73aa9-86c5-452b-af65-e8b14090b016","Type":"ContainerDied","Data":"7ac75d528a25ad188768f98e7dac3bf07e4793154fbb71e0e1c484b04d01ca22"} Jan 22 00:13:29 crc kubenswrapper[4829]: I0122 00:13:29.644631 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ngtfk" event={"ID":"4ef73aa9-86c5-452b-af65-e8b14090b016","Type":"ContainerStarted","Data":"3959a1cb38de364bb56d5f4d919b77013df891fb2bb4d8dad2cb15fcdf339f15"} Jan 22 00:13:29 crc kubenswrapper[4829]: I0122 00:13:29.646218 4829 generic.go:334] "Generic (PLEG): container finished" podID="d464f7a3-9a5c-4666-90a5-26ab2bf827b1" containerID="a39f6c63ccac44ef015123d88a566f1effdf623a3f1b8a0bf09466d6f0327e3e" exitCode=0 Jan 22 00:13:29 crc kubenswrapper[4829]: I0122 00:13:29.646269 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-twbvl" event={"ID":"d464f7a3-9a5c-4666-90a5-26ab2bf827b1","Type":"ContainerDied","Data":"a39f6c63ccac44ef015123d88a566f1effdf623a3f1b8a0bf09466d6f0327e3e"} Jan 22 00:13:29 crc kubenswrapper[4829]: I0122 00:13:29.649251 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k4khp" event={"ID":"562c006a-af63-46fc-91b8-fcab2322a59d","Type":"ContainerStarted","Data":"5d7eecbeb383243292f5f80f3b32ef33b39b575c1716ec916aba2e6183cb1839"} Jan 22 00:13:29 crc kubenswrapper[4829]: I0122 00:13:29.686650 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-k4khp" podStartSLOduration=2.262341539 podStartE2EDuration="4.686631406s" podCreationTimestamp="2026-01-22 00:13:25 +0000 UTC" firstStartedPulling="2026-01-22 00:13:26.611187424 +0000 UTC m=+384.647429336" lastFinishedPulling="2026-01-22 00:13:29.035477291 +0000 UTC m=+387.071719203" observedRunningTime="2026-01-22 00:13:29.683876621 +0000 UTC m=+387.720118533" watchObservedRunningTime="2026-01-22 00:13:29.686631406 +0000 UTC m=+387.722873318" Jan 22 00:13:30 crc kubenswrapper[4829]: I0122 00:13:30.661302 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-twbvl" event={"ID":"d464f7a3-9a5c-4666-90a5-26ab2bf827b1","Type":"ContainerStarted","Data":"4a6f5bf082e644dc487b7dceea231510794ee8865102a90715686095b3b988b3"} Jan 22 00:13:30 crc kubenswrapper[4829]: I0122 00:13:30.682615 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-twbvl" podStartSLOduration=2.270108636 podStartE2EDuration="4.682587105s" podCreationTimestamp="2026-01-22 00:13:26 +0000 UTC" firstStartedPulling="2026-01-22 00:13:27.619688836 +0000 UTC m=+385.655930748" lastFinishedPulling="2026-01-22 00:13:30.032167305 +0000 UTC m=+388.068409217" observedRunningTime="2026-01-22 00:13:30.681835109 +0000 UTC m=+388.718077061" 
watchObservedRunningTime="2026-01-22 00:13:30.682587105 +0000 UTC m=+388.718829017" Jan 22 00:13:31 crc kubenswrapper[4829]: I0122 00:13:31.958591 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-gm6bk"] Jan 22 00:13:31 crc kubenswrapper[4829]: I0122 00:13:31.960382 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" Jan 22 00:13:31 crc kubenswrapper[4829]: I0122 00:13:31.972231 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-gm6bk"] Jan 22 00:13:32 crc kubenswrapper[4829]: I0122 00:13:32.010628 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/001bf53f-5df3-463f-92c1-7a0a666f2e37-ca-trust-extracted\") pod \"image-registry-66df7c8f76-gm6bk\" (UID: \"001bf53f-5df3-463f-92c1-7a0a666f2e37\") " pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" Jan 22 00:13:32 crc kubenswrapper[4829]: I0122 00:13:32.010680 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/001bf53f-5df3-463f-92c1-7a0a666f2e37-trusted-ca\") pod \"image-registry-66df7c8f76-gm6bk\" (UID: \"001bf53f-5df3-463f-92c1-7a0a666f2e37\") " pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" Jan 22 00:13:32 crc kubenswrapper[4829]: I0122 00:13:32.010736 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ggh9\" (UniqueName: \"kubernetes.io/projected/001bf53f-5df3-463f-92c1-7a0a666f2e37-kube-api-access-9ggh9\") pod \"image-registry-66df7c8f76-gm6bk\" (UID: \"001bf53f-5df3-463f-92c1-7a0a666f2e37\") " pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" Jan 22 00:13:32 crc kubenswrapper[4829]: I0122 00:13:32.010759 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/001bf53f-5df3-463f-92c1-7a0a666f2e37-installation-pull-secrets\") pod \"image-registry-66df7c8f76-gm6bk\" (UID: \"001bf53f-5df3-463f-92c1-7a0a666f2e37\") " pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" Jan 22 00:13:32 crc kubenswrapper[4829]: I0122 00:13:32.010775 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/001bf53f-5df3-463f-92c1-7a0a666f2e37-registry-certificates\") pod \"image-registry-66df7c8f76-gm6bk\" (UID: \"001bf53f-5df3-463f-92c1-7a0a666f2e37\") " pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" Jan 22 00:13:32 crc kubenswrapper[4829]: I0122 00:13:32.010791 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/001bf53f-5df3-463f-92c1-7a0a666f2e37-registry-tls\") pod \"image-registry-66df7c8f76-gm6bk\" (UID: \"001bf53f-5df3-463f-92c1-7a0a666f2e37\") " pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" Jan 22 00:13:32 crc kubenswrapper[4829]: I0122 00:13:32.011596 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-gm6bk\" (UID: \"001bf53f-5df3-463f-92c1-7a0a666f2e37\") " pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" Jan 22 00:13:32 crc kubenswrapper[4829]: I0122 00:13:32.011640 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/001bf53f-5df3-463f-92c1-7a0a666f2e37-bound-sa-token\") pod \"image-registry-66df7c8f76-gm6bk\" (UID: \"001bf53f-5df3-463f-92c1-7a0a666f2e37\") " pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" Jan 22 00:13:32 crc kubenswrapper[4829]: I0122 00:13:32.045618 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-gm6bk\" (UID: \"001bf53f-5df3-463f-92c1-7a0a666f2e37\") " pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" Jan 22 00:13:32 crc kubenswrapper[4829]: I0122 00:13:32.112583 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/001bf53f-5df3-463f-92c1-7a0a666f2e37-bound-sa-token\") pod \"image-registry-66df7c8f76-gm6bk\" (UID: \"001bf53f-5df3-463f-92c1-7a0a666f2e37\") " pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" Jan 22 00:13:32 crc kubenswrapper[4829]: I0122 00:13:32.112659 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/001bf53f-5df3-463f-92c1-7a0a666f2e37-ca-trust-extracted\") pod \"image-registry-66df7c8f76-gm6bk\" (UID: \"001bf53f-5df3-463f-92c1-7a0a666f2e37\") " pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" Jan 22 00:13:32 crc kubenswrapper[4829]: I0122 00:13:32.112687 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/001bf53f-5df3-463f-92c1-7a0a666f2e37-trusted-ca\") pod \"image-registry-66df7c8f76-gm6bk\" (UID: \"001bf53f-5df3-463f-92c1-7a0a666f2e37\") " pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" Jan 22 00:13:32 crc kubenswrapper[4829]: I0122 00:13:32.112742 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ggh9\" (UniqueName: \"kubernetes.io/projected/001bf53f-5df3-463f-92c1-7a0a666f2e37-kube-api-access-9ggh9\") pod \"image-registry-66df7c8f76-gm6bk\" (UID: \"001bf53f-5df3-463f-92c1-7a0a666f2e37\") " pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" Jan 22 00:13:32 crc kubenswrapper[4829]: I0122 00:13:32.112768 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/001bf53f-5df3-463f-92c1-7a0a666f2e37-installation-pull-secrets\") pod \"image-registry-66df7c8f76-gm6bk\" (UID: \"001bf53f-5df3-463f-92c1-7a0a666f2e37\") " pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" Jan 22 00:13:32 crc kubenswrapper[4829]: I0122 00:13:32.112784 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/001bf53f-5df3-463f-92c1-7a0a666f2e37-registry-certificates\") pod \"image-registry-66df7c8f76-gm6bk\" (UID: 
\"001bf53f-5df3-463f-92c1-7a0a666f2e37\") " pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" Jan 22 00:13:32 crc kubenswrapper[4829]: I0122 00:13:32.112800 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/001bf53f-5df3-463f-92c1-7a0a666f2e37-registry-tls\") pod \"image-registry-66df7c8f76-gm6bk\" (UID: \"001bf53f-5df3-463f-92c1-7a0a666f2e37\") " pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" Jan 22 00:13:32 crc kubenswrapper[4829]: I0122 00:13:32.113140 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/001bf53f-5df3-463f-92c1-7a0a666f2e37-ca-trust-extracted\") pod \"image-registry-66df7c8f76-gm6bk\" (UID: \"001bf53f-5df3-463f-92c1-7a0a666f2e37\") " pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" Jan 22 00:13:32 crc kubenswrapper[4829]: I0122 00:13:32.114213 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/001bf53f-5df3-463f-92c1-7a0a666f2e37-registry-certificates\") pod \"image-registry-66df7c8f76-gm6bk\" (UID: \"001bf53f-5df3-463f-92c1-7a0a666f2e37\") " pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" Jan 22 00:13:32 crc kubenswrapper[4829]: I0122 00:13:32.114527 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/001bf53f-5df3-463f-92c1-7a0a666f2e37-trusted-ca\") pod \"image-registry-66df7c8f76-gm6bk\" (UID: \"001bf53f-5df3-463f-92c1-7a0a666f2e37\") " pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" Jan 22 00:13:32 crc kubenswrapper[4829]: I0122 00:13:32.119864 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/001bf53f-5df3-463f-92c1-7a0a666f2e37-registry-tls\") pod \"image-registry-66df7c8f76-gm6bk\" (UID: \"001bf53f-5df3-463f-92c1-7a0a666f2e37\") " pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" Jan 22 00:13:32 crc kubenswrapper[4829]: I0122 00:13:32.119877 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/001bf53f-5df3-463f-92c1-7a0a666f2e37-installation-pull-secrets\") pod \"image-registry-66df7c8f76-gm6bk\" (UID: \"001bf53f-5df3-463f-92c1-7a0a666f2e37\") " pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" Jan 22 00:13:32 crc kubenswrapper[4829]: I0122 00:13:32.135511 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ggh9\" (UniqueName: \"kubernetes.io/projected/001bf53f-5df3-463f-92c1-7a0a666f2e37-kube-api-access-9ggh9\") pod \"image-registry-66df7c8f76-gm6bk\" (UID: \"001bf53f-5df3-463f-92c1-7a0a666f2e37\") " pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" Jan 22 00:13:32 crc kubenswrapper[4829]: I0122 00:13:32.136101 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/001bf53f-5df3-463f-92c1-7a0a666f2e37-bound-sa-token\") pod \"image-registry-66df7c8f76-gm6bk\" (UID: \"001bf53f-5df3-463f-92c1-7a0a666f2e37\") " pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" Jan 22 00:13:32 crc kubenswrapper[4829]: I0122 00:13:32.282377 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" Jan 22 00:13:32 crc kubenswrapper[4829]: I0122 00:13:32.675266 4829 generic.go:334] "Generic (PLEG): container finished" podID="4ef73aa9-86c5-452b-af65-e8b14090b016" containerID="5b84a27e68af623380f616ed816b77bcca1f35dcd76cc7bce4d042eb77b3f9af" exitCode=0 Jan 22 00:13:32 crc kubenswrapper[4829]: I0122 00:13:32.675331 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ngtfk" event={"ID":"4ef73aa9-86c5-452b-af65-e8b14090b016","Type":"ContainerDied","Data":"5b84a27e68af623380f616ed816b77bcca1f35dcd76cc7bce4d042eb77b3f9af"} Jan 22 00:13:32 crc kubenswrapper[4829]: I0122 00:13:32.728977 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-gm6bk"] Jan 22 00:13:32 crc kubenswrapper[4829]: W0122 00:13:32.737736 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod001bf53f_5df3_463f_92c1_7a0a666f2e37.slice/crio-e90748e0066f8431d7645a6944304ebe8028a41102fbf37e953307e111b5df34 WatchSource:0}: Error finding container e90748e0066f8431d7645a6944304ebe8028a41102fbf37e953307e111b5df34: Status 404 returned error can't find the container with id e90748e0066f8431d7645a6944304ebe8028a41102fbf37e953307e111b5df34 Jan 22 00:13:33 crc kubenswrapper[4829]: I0122 00:13:33.687248 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" event={"ID":"001bf53f-5df3-463f-92c1-7a0a666f2e37","Type":"ContainerStarted","Data":"072aaccdaa6fbeac0a65b122770dbcbbfb644345b120093167d57ff7a69d065e"} Jan 22 00:13:33 crc kubenswrapper[4829]: I0122 00:13:33.687517 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" event={"ID":"001bf53f-5df3-463f-92c1-7a0a666f2e37","Type":"ContainerStarted","Data":"e90748e0066f8431d7645a6944304ebe8028a41102fbf37e953307e111b5df34"} Jan 22 00:13:33 crc kubenswrapper[4829]: I0122 00:13:33.687589 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" Jan 22 00:13:33 crc kubenswrapper[4829]: I0122 00:13:33.690310 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ngtfk" event={"ID":"4ef73aa9-86c5-452b-af65-e8b14090b016","Type":"ContainerStarted","Data":"a283e2259446191675a9079b57245ccdac2a2ba317813206f81d3954e94ccf5f"} Jan 22 00:13:33 crc kubenswrapper[4829]: I0122 00:13:33.724086 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" podStartSLOduration=2.724070214 podStartE2EDuration="2.724070214s" podCreationTimestamp="2026-01-22 00:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:13:33.722405207 +0000 UTC m=+391.758647119" watchObservedRunningTime="2026-01-22 00:13:33.724070214 +0000 UTC m=+391.760312126" Jan 22 00:13:33 crc kubenswrapper[4829]: I0122 00:13:33.745503 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ngtfk" podStartSLOduration=1.847472738 podStartE2EDuration="5.745484994s" podCreationTimestamp="2026-01-22 00:13:28 +0000 UTC" firstStartedPulling="2026-01-22 00:13:29.646210239 +0000 UTC m=+387.682452161" 
lastFinishedPulling="2026-01-22 00:13:33.544222465 +0000 UTC m=+391.580464417" observedRunningTime="2026-01-22 00:13:33.744327734 +0000 UTC m=+391.780569656" watchObservedRunningTime="2026-01-22 00:13:33.745484994 +0000 UTC m=+391.781726906" Jan 22 00:13:34 crc kubenswrapper[4829]: I0122 00:13:34.486678 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-g7zhb" Jan 22 00:13:34 crc kubenswrapper[4829]: I0122 00:13:34.486753 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-g7zhb" Jan 22 00:13:34 crc kubenswrapper[4829]: I0122 00:13:34.545773 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-g7zhb" Jan 22 00:13:34 crc kubenswrapper[4829]: I0122 00:13:34.648410 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:13:34 crc kubenswrapper[4829]: I0122 00:13:34.648570 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:13:34 crc kubenswrapper[4829]: I0122 00:13:34.653928 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:13:34 crc kubenswrapper[4829]: I0122 00:13:34.653996 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:13:34 crc kubenswrapper[4829]: I0122 00:13:34.654086 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 00:13:34 crc kubenswrapper[4829]: I0122 00:13:34.668317 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:13:34 crc kubenswrapper[4829]: I0122 00:13:34.668372 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:13:34 crc kubenswrapper[4829]: I0122 00:13:34.673208 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:13:34 crc kubenswrapper[4829]: I0122 00:13:34.745266 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-g7zhb" Jan 22 00:13:35 crc kubenswrapper[4829]: W0122 00:13:35.112092 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-1131151f01f2ba097a836d40418d0749513c9980d699ac10391c43aa62a023a0 WatchSource:0}: Error finding container 1131151f01f2ba097a836d40418d0749513c9980d699ac10391c43aa62a023a0: Status 404 returned error can't find the container with id 1131151f01f2ba097a836d40418d0749513c9980d699ac10391c43aa62a023a0 Jan 22 00:13:35 crc kubenswrapper[4829]: I0122 00:13:35.703839 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"35dca701c2ade0b9706ece0948f72f1668b6431ac3f472d46a3ff6c975f896dc"} Jan 22 00:13:35 crc kubenswrapper[4829]: I0122 00:13:35.703892 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"91edccb69682474cd99ac60d6cdc764949544dedca437010d7476ad7790eb7de"} Jan 22 00:13:35 crc kubenswrapper[4829]: I0122 00:13:35.705841 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"5a121f022ebd5b40179f34e29400550ca6ea48f0d75a2cf81c8055e3b323764b"} Jan 22 00:13:35 crc kubenswrapper[4829]: I0122 00:13:35.705907 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"1131151f01f2ba097a836d40418d0749513c9980d699ac10391c43aa62a023a0"} Jan 22 00:13:35 crc kubenswrapper[4829]: I0122 00:13:35.706089 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:13:35 crc kubenswrapper[4829]: I0122 00:13:35.885385 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-k4khp" Jan 22 00:13:35 crc kubenswrapper[4829]: 
I0122 00:13:35.885469 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-k4khp" Jan 22 00:13:35 crc kubenswrapper[4829]: I0122 00:13:35.932761 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-k4khp" Jan 22 00:13:36 crc kubenswrapper[4829]: I0122 00:13:36.750164 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-k4khp" Jan 22 00:13:36 crc kubenswrapper[4829]: I0122 00:13:36.901224 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-twbvl" Jan 22 00:13:36 crc kubenswrapper[4829]: I0122 00:13:36.901293 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-twbvl" Jan 22 00:13:36 crc kubenswrapper[4829]: I0122 00:13:36.957157 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-twbvl" Jan 22 00:13:37 crc kubenswrapper[4829]: I0122 00:13:37.764909 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-twbvl" Jan 22 00:13:38 crc kubenswrapper[4829]: I0122 00:13:38.718371 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ngtfk" Jan 22 00:13:38 crc kubenswrapper[4829]: I0122 00:13:38.719816 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ngtfk" Jan 22 00:13:38 crc kubenswrapper[4829]: I0122 00:13:38.814004 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ngtfk" Jan 22 00:13:39 crc kubenswrapper[4829]: I0122 00:13:39.819757 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ngtfk" Jan 22 00:13:52 crc kubenswrapper[4829]: I0122 00:13:52.289430 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-gm6bk" Jan 22 00:13:52 crc kubenswrapper[4829]: I0122 00:13:52.365930 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-6ltv9"] Jan 22 00:14:04 crc kubenswrapper[4829]: I0122 00:14:04.658701 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:14:04 crc kubenswrapper[4829]: I0122 00:14:04.659499 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:14:04 crc kubenswrapper[4829]: I0122 00:14:04.659627 4829 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 00:14:04 crc kubenswrapper[4829]: I0122 00:14:04.660382 4829 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"7d8aed39761e236f4d6bdcace20b41c7a53f6e11cebd6db8c3d28637f0ea9c9c"} pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 00:14:04 crc kubenswrapper[4829]: I0122 00:14:04.660468 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" containerID="cri-o://7d8aed39761e236f4d6bdcace20b41c7a53f6e11cebd6db8c3d28637f0ea9c9c" gracePeriod=600 Jan 22 00:14:04 crc kubenswrapper[4829]: I0122 00:14:04.886587 4829 generic.go:334] "Generic (PLEG): container finished" podID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerID="7d8aed39761e236f4d6bdcace20b41c7a53f6e11cebd6db8c3d28637f0ea9c9c" exitCode=0 Jan 22 00:14:04 crc kubenswrapper[4829]: I0122 00:14:04.886690 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerDied","Data":"7d8aed39761e236f4d6bdcace20b41c7a53f6e11cebd6db8c3d28637f0ea9c9c"} Jan 22 00:14:04 crc kubenswrapper[4829]: I0122 00:14:04.887065 4829 scope.go:117] "RemoveContainer" containerID="94a5dfab57ace84e5ba85843e9ea6b8a5e3b9a82d70c823140ce43f4bfd36e1e" Jan 22 00:14:05 crc kubenswrapper[4829]: I0122 00:14:05.904674 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerStarted","Data":"cb30e41ea57ec072bdbdc7d1adc9b52feca9581b84ae5cf109ff0d3fe8f78fec"} Jan 22 00:14:14 crc kubenswrapper[4829]: I0122 00:14:14.678944 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.407950 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" podUID="7755e10e-4f27-44b7-b510-a8e4b5e53e1d" containerName="registry" containerID="cri-o://6cb2232521eb98f2a0f75624120f5c55000d32065763ba838018a25bac91d926" gracePeriod=30 Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.518008 4829 patch_prober.go:28] interesting pod/image-registry-697d97f7c8-6ltv9 container/registry namespace/openshift-image-registry: Readiness probe status=failure output="Get \"https://10.217.0.16:5000/healthz\": dial tcp 10.217.0.16:5000: connect: connection refused" start-of-body= Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.518391 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" podUID="7755e10e-4f27-44b7-b510-a8e4b5e53e1d" containerName="registry" probeResult="failure" output="Get \"https://10.217.0.16:5000/healthz\": dial tcp 10.217.0.16:5000: connect: connection refused" Jan 22 00:14:17 crc kubenswrapper[4829]: E0122 00:14:17.542817 4829 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7755e10e_4f27_44b7_b510_a8e4b5e53e1d.slice/crio-6cb2232521eb98f2a0f75624120f5c55000d32065763ba838018a25bac91d926.scope\": RecentStats: unable to find data in memory cache]" Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.832518 4829 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.927960 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-installation-pull-secrets\") pod \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.928027 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-ca-trust-extracted\") pod \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.928266 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.928301 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9wwn2\" (UniqueName: \"kubernetes.io/projected/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-kube-api-access-9wwn2\") pod \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.928522 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-registry-tls\") pod \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.928617 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-registry-certificates\") pod \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.928670 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-bound-sa-token\") pod \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.930100 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "7755e10e-4f27-44b7-b510-a8e4b5e53e1d" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.930398 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-trusted-ca\") pod \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\" (UID: \"7755e10e-4f27-44b7-b510-a8e4b5e53e1d\") " Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.930923 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "7755e10e-4f27-44b7-b510-a8e4b5e53e1d" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.931168 4829 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.931198 4829 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.935280 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "7755e10e-4f27-44b7-b510-a8e4b5e53e1d" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.936101 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-kube-api-access-9wwn2" (OuterVolumeSpecName: "kube-api-access-9wwn2") pod "7755e10e-4f27-44b7-b510-a8e4b5e53e1d" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d"). InnerVolumeSpecName "kube-api-access-9wwn2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.936231 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "7755e10e-4f27-44b7-b510-a8e4b5e53e1d" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.936629 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "7755e10e-4f27-44b7-b510-a8e4b5e53e1d" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.939647 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "7755e10e-4f27-44b7-b510-a8e4b5e53e1d" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d"). 
InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.955652 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "7755e10e-4f27-44b7-b510-a8e4b5e53e1d" (UID: "7755e10e-4f27-44b7-b510-a8e4b5e53e1d"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.967122 4829 generic.go:334] "Generic (PLEG): container finished" podID="7755e10e-4f27-44b7-b510-a8e4b5e53e1d" containerID="6cb2232521eb98f2a0f75624120f5c55000d32065763ba838018a25bac91d926" exitCode=0 Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.967171 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" event={"ID":"7755e10e-4f27-44b7-b510-a8e4b5e53e1d","Type":"ContainerDied","Data":"6cb2232521eb98f2a0f75624120f5c55000d32065763ba838018a25bac91d926"} Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.967199 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" event={"ID":"7755e10e-4f27-44b7-b510-a8e4b5e53e1d","Type":"ContainerDied","Data":"edc4ef08a9220353663d5d5a372f8136d2abbffa15e66d8521dc4ea2712e9268"} Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.967215 4829 scope.go:117] "RemoveContainer" containerID="6cb2232521eb98f2a0f75624120f5c55000d32065763ba838018a25bac91d926" Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.967320 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-6ltv9" Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.982641 4829 scope.go:117] "RemoveContainer" containerID="6cb2232521eb98f2a0f75624120f5c55000d32065763ba838018a25bac91d926" Jan 22 00:14:17 crc kubenswrapper[4829]: E0122 00:14:17.983260 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6cb2232521eb98f2a0f75624120f5c55000d32065763ba838018a25bac91d926\": container with ID starting with 6cb2232521eb98f2a0f75624120f5c55000d32065763ba838018a25bac91d926 not found: ID does not exist" containerID="6cb2232521eb98f2a0f75624120f5c55000d32065763ba838018a25bac91d926" Jan 22 00:14:17 crc kubenswrapper[4829]: I0122 00:14:17.983298 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6cb2232521eb98f2a0f75624120f5c55000d32065763ba838018a25bac91d926"} err="failed to get container status \"6cb2232521eb98f2a0f75624120f5c55000d32065763ba838018a25bac91d926\": rpc error: code = NotFound desc = could not find container \"6cb2232521eb98f2a0f75624120f5c55000d32065763ba838018a25bac91d926\": container with ID starting with 6cb2232521eb98f2a0f75624120f5c55000d32065763ba838018a25bac91d926 not found: ID does not exist" Jan 22 00:14:18 crc kubenswrapper[4829]: I0122 00:14:18.002006 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-6ltv9"] Jan 22 00:14:18 crc kubenswrapper[4829]: I0122 00:14:18.008642 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-6ltv9"] Jan 22 00:14:18 crc kubenswrapper[4829]: I0122 00:14:18.032529 4829 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 22 00:14:18 crc kubenswrapper[4829]: I0122 00:14:18.032583 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9wwn2\" (UniqueName: \"kubernetes.io/projected/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-kube-api-access-9wwn2\") on node \"crc\" DevicePath \"\"" Jan 22 00:14:18 crc kubenswrapper[4829]: I0122 00:14:18.032595 4829 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 22 00:14:18 crc kubenswrapper[4829]: I0122 00:14:18.032608 4829 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 22 00:14:18 crc kubenswrapper[4829]: I0122 00:14:18.032617 4829 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/7755e10e-4f27-44b7-b510-a8e4b5e53e1d-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 22 00:14:18 crc kubenswrapper[4829]: I0122 00:14:18.562825 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7755e10e-4f27-44b7-b510-a8e4b5e53e1d" path="/var/lib/kubelet/pods/7755e10e-4f27-44b7-b510-a8e4b5e53e1d/volumes" Jan 22 00:15:00 crc kubenswrapper[4829]: I0122 00:15:00.179307 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484015-jmwr9"] Jan 22 00:15:00 crc kubenswrapper[4829]: E0122 
00:15:00.180141 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7755e10e-4f27-44b7-b510-a8e4b5e53e1d" containerName="registry" Jan 22 00:15:00 crc kubenswrapper[4829]: I0122 00:15:00.180158 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="7755e10e-4f27-44b7-b510-a8e4b5e53e1d" containerName="registry" Jan 22 00:15:00 crc kubenswrapper[4829]: I0122 00:15:00.180297 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="7755e10e-4f27-44b7-b510-a8e4b5e53e1d" containerName="registry" Jan 22 00:15:00 crc kubenswrapper[4829]: I0122 00:15:00.180764 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484015-jmwr9" Jan 22 00:15:00 crc kubenswrapper[4829]: I0122 00:15:00.183765 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 00:15:00 crc kubenswrapper[4829]: I0122 00:15:00.186175 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 00:15:00 crc kubenswrapper[4829]: I0122 00:15:00.194761 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484015-jmwr9"] Jan 22 00:15:00 crc kubenswrapper[4829]: I0122 00:15:00.334904 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ae46e15-a52a-445b-8d48-ad5ba5415ed0-secret-volume\") pod \"collect-profiles-29484015-jmwr9\" (UID: \"3ae46e15-a52a-445b-8d48-ad5ba5415ed0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484015-jmwr9" Jan 22 00:15:00 crc kubenswrapper[4829]: I0122 00:15:00.335003 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ae46e15-a52a-445b-8d48-ad5ba5415ed0-config-volume\") pod \"collect-profiles-29484015-jmwr9\" (UID: \"3ae46e15-a52a-445b-8d48-ad5ba5415ed0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484015-jmwr9" Jan 22 00:15:00 crc kubenswrapper[4829]: I0122 00:15:00.335033 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxvrg\" (UniqueName: \"kubernetes.io/projected/3ae46e15-a52a-445b-8d48-ad5ba5415ed0-kube-api-access-qxvrg\") pod \"collect-profiles-29484015-jmwr9\" (UID: \"3ae46e15-a52a-445b-8d48-ad5ba5415ed0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484015-jmwr9" Jan 22 00:15:00 crc kubenswrapper[4829]: I0122 00:15:00.436929 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ae46e15-a52a-445b-8d48-ad5ba5415ed0-config-volume\") pod \"collect-profiles-29484015-jmwr9\" (UID: \"3ae46e15-a52a-445b-8d48-ad5ba5415ed0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484015-jmwr9" Jan 22 00:15:00 crc kubenswrapper[4829]: I0122 00:15:00.436984 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxvrg\" (UniqueName: \"kubernetes.io/projected/3ae46e15-a52a-445b-8d48-ad5ba5415ed0-kube-api-access-qxvrg\") pod \"collect-profiles-29484015-jmwr9\" (UID: \"3ae46e15-a52a-445b-8d48-ad5ba5415ed0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484015-jmwr9" Jan 22 00:15:00 crc 
kubenswrapper[4829]: I0122 00:15:00.437044 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ae46e15-a52a-445b-8d48-ad5ba5415ed0-secret-volume\") pod \"collect-profiles-29484015-jmwr9\" (UID: \"3ae46e15-a52a-445b-8d48-ad5ba5415ed0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484015-jmwr9" Jan 22 00:15:00 crc kubenswrapper[4829]: I0122 00:15:00.437982 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ae46e15-a52a-445b-8d48-ad5ba5415ed0-config-volume\") pod \"collect-profiles-29484015-jmwr9\" (UID: \"3ae46e15-a52a-445b-8d48-ad5ba5415ed0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484015-jmwr9" Jan 22 00:15:00 crc kubenswrapper[4829]: I0122 00:15:00.451156 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ae46e15-a52a-445b-8d48-ad5ba5415ed0-secret-volume\") pod \"collect-profiles-29484015-jmwr9\" (UID: \"3ae46e15-a52a-445b-8d48-ad5ba5415ed0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484015-jmwr9" Jan 22 00:15:00 crc kubenswrapper[4829]: I0122 00:15:00.459291 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxvrg\" (UniqueName: \"kubernetes.io/projected/3ae46e15-a52a-445b-8d48-ad5ba5415ed0-kube-api-access-qxvrg\") pod \"collect-profiles-29484015-jmwr9\" (UID: \"3ae46e15-a52a-445b-8d48-ad5ba5415ed0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484015-jmwr9" Jan 22 00:15:00 crc kubenswrapper[4829]: I0122 00:15:00.502277 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484015-jmwr9" Jan 22 00:15:00 crc kubenswrapper[4829]: I0122 00:15:00.921858 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484015-jmwr9"] Jan 22 00:15:01 crc kubenswrapper[4829]: I0122 00:15:01.253406 4829 generic.go:334] "Generic (PLEG): container finished" podID="3ae46e15-a52a-445b-8d48-ad5ba5415ed0" containerID="4aa378a603bff3188493fd4abed3143cc4ceed135d138543f2cbd48941d6e91e" exitCode=0 Jan 22 00:15:01 crc kubenswrapper[4829]: I0122 00:15:01.253567 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484015-jmwr9" event={"ID":"3ae46e15-a52a-445b-8d48-ad5ba5415ed0","Type":"ContainerDied","Data":"4aa378a603bff3188493fd4abed3143cc4ceed135d138543f2cbd48941d6e91e"} Jan 22 00:15:01 crc kubenswrapper[4829]: I0122 00:15:01.253990 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484015-jmwr9" event={"ID":"3ae46e15-a52a-445b-8d48-ad5ba5415ed0","Type":"ContainerStarted","Data":"9d93f25cd4fdab2e1e83626de4ed26c64e527e124af0b460e4dc63e8eadfa9cc"} Jan 22 00:15:02 crc kubenswrapper[4829]: I0122 00:15:02.547450 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484015-jmwr9" Jan 22 00:15:02 crc kubenswrapper[4829]: I0122 00:15:02.667996 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ae46e15-a52a-445b-8d48-ad5ba5415ed0-config-volume\") pod \"3ae46e15-a52a-445b-8d48-ad5ba5415ed0\" (UID: \"3ae46e15-a52a-445b-8d48-ad5ba5415ed0\") " Jan 22 00:15:02 crc kubenswrapper[4829]: I0122 00:15:02.668052 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ae46e15-a52a-445b-8d48-ad5ba5415ed0-secret-volume\") pod \"3ae46e15-a52a-445b-8d48-ad5ba5415ed0\" (UID: \"3ae46e15-a52a-445b-8d48-ad5ba5415ed0\") " Jan 22 00:15:02 crc kubenswrapper[4829]: I0122 00:15:02.668104 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qxvrg\" (UniqueName: \"kubernetes.io/projected/3ae46e15-a52a-445b-8d48-ad5ba5415ed0-kube-api-access-qxvrg\") pod \"3ae46e15-a52a-445b-8d48-ad5ba5415ed0\" (UID: \"3ae46e15-a52a-445b-8d48-ad5ba5415ed0\") " Jan 22 00:15:02 crc kubenswrapper[4829]: I0122 00:15:02.668683 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ae46e15-a52a-445b-8d48-ad5ba5415ed0-config-volume" (OuterVolumeSpecName: "config-volume") pod "3ae46e15-a52a-445b-8d48-ad5ba5415ed0" (UID: "3ae46e15-a52a-445b-8d48-ad5ba5415ed0"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:15:02 crc kubenswrapper[4829]: I0122 00:15:02.672390 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ae46e15-a52a-445b-8d48-ad5ba5415ed0-kube-api-access-qxvrg" (OuterVolumeSpecName: "kube-api-access-qxvrg") pod "3ae46e15-a52a-445b-8d48-ad5ba5415ed0" (UID: "3ae46e15-a52a-445b-8d48-ad5ba5415ed0"). InnerVolumeSpecName "kube-api-access-qxvrg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:15:02 crc kubenswrapper[4829]: I0122 00:15:02.675721 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ae46e15-a52a-445b-8d48-ad5ba5415ed0-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "3ae46e15-a52a-445b-8d48-ad5ba5415ed0" (UID: "3ae46e15-a52a-445b-8d48-ad5ba5415ed0"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:15:02 crc kubenswrapper[4829]: I0122 00:15:02.769367 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qxvrg\" (UniqueName: \"kubernetes.io/projected/3ae46e15-a52a-445b-8d48-ad5ba5415ed0-kube-api-access-qxvrg\") on node \"crc\" DevicePath \"\"" Jan 22 00:15:02 crc kubenswrapper[4829]: I0122 00:15:02.769792 4829 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ae46e15-a52a-445b-8d48-ad5ba5415ed0-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 00:15:02 crc kubenswrapper[4829]: I0122 00:15:02.769873 4829 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ae46e15-a52a-445b-8d48-ad5ba5415ed0-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 00:15:03 crc kubenswrapper[4829]: I0122 00:15:03.270815 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484015-jmwr9" event={"ID":"3ae46e15-a52a-445b-8d48-ad5ba5415ed0","Type":"ContainerDied","Data":"9d93f25cd4fdab2e1e83626de4ed26c64e527e124af0b460e4dc63e8eadfa9cc"} Jan 22 00:15:03 crc kubenswrapper[4829]: I0122 00:15:03.270915 4829 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9d93f25cd4fdab2e1e83626de4ed26c64e527e124af0b460e4dc63e8eadfa9cc" Jan 22 00:15:03 crc kubenswrapper[4829]: I0122 00:15:03.271021 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484015-jmwr9" Jan 22 00:16:04 crc kubenswrapper[4829]: I0122 00:16:04.658779 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:16:04 crc kubenswrapper[4829]: I0122 00:16:04.659311 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:16:34 crc kubenswrapper[4829]: I0122 00:16:34.658405 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:16:34 crc kubenswrapper[4829]: I0122 00:16:34.662280 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.167058 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-fd6j8"] Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.167986 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" 
containerName="ovn-controller" containerID="cri-o://b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990" gracePeriod=30 Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.168366 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="sbdb" containerID="cri-o://1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d" gracePeriod=30 Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.168416 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="nbdb" containerID="cri-o://417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495" gracePeriod=30 Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.168454 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="northd" containerID="cri-o://03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c" gracePeriod=30 Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.168483 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269" gracePeriod=30 Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.168556 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="kube-rbac-proxy-node" containerID="cri-o://f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615" gracePeriod=30 Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.168599 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="ovn-acl-logging" containerID="cri-o://a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e" gracePeriod=30 Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.217142 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="ovnkube-controller" containerID="cri-o://55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183" gracePeriod=30 Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.504684 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fd6j8_7df1ca93-0e8f-4f06-8b8f-2297a8dbb340/ovnkube-controller/3.log" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.507009 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fd6j8_7df1ca93-0e8f-4f06-8b8f-2297a8dbb340/ovn-acl-logging/0.log" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.507729 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fd6j8_7df1ca93-0e8f-4f06-8b8f-2297a8dbb340/ovn-controller/0.log" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.508397 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.561608 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-xp7m4"] Jan 22 00:16:47 crc kubenswrapper[4829]: E0122 00:16:47.561945 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="kube-rbac-proxy-node" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.561969 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="kube-rbac-proxy-node" Jan 22 00:16:47 crc kubenswrapper[4829]: E0122 00:16:47.561988 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="ovn-acl-logging" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.561996 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="ovn-acl-logging" Jan 22 00:16:47 crc kubenswrapper[4829]: E0122 00:16:47.562010 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="ovnkube-controller" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.562021 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="ovnkube-controller" Jan 22 00:16:47 crc kubenswrapper[4829]: E0122 00:16:47.562030 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="ovnkube-controller" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.562039 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="ovnkube-controller" Jan 22 00:16:47 crc kubenswrapper[4829]: E0122 00:16:47.562050 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="ovn-controller" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.562059 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="ovn-controller" Jan 22 00:16:47 crc kubenswrapper[4829]: E0122 00:16:47.562072 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="kubecfg-setup" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.562081 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="kubecfg-setup" Jan 22 00:16:47 crc kubenswrapper[4829]: E0122 00:16:47.562091 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="sbdb" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.562100 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="sbdb" Jan 22 00:16:47 crc kubenswrapper[4829]: E0122 00:16:47.562112 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="northd" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.562120 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="northd" Jan 22 00:16:47 crc kubenswrapper[4829]: E0122 00:16:47.562134 4829 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="ovnkube-controller" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.562145 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="ovnkube-controller" Jan 22 00:16:47 crc kubenswrapper[4829]: E0122 00:16:47.562157 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="nbdb" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.562164 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="nbdb" Jan 22 00:16:47 crc kubenswrapper[4829]: E0122 00:16:47.562173 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="ovnkube-controller" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.562181 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="ovnkube-controller" Jan 22 00:16:47 crc kubenswrapper[4829]: E0122 00:16:47.562197 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="kube-rbac-proxy-ovn-metrics" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.562206 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="kube-rbac-proxy-ovn-metrics" Jan 22 00:16:47 crc kubenswrapper[4829]: E0122 00:16:47.562215 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ae46e15-a52a-445b-8d48-ad5ba5415ed0" containerName="collect-profiles" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.562224 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ae46e15-a52a-445b-8d48-ad5ba5415ed0" containerName="collect-profiles" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.562345 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="ovnkube-controller" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.562358 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="northd" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.562371 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ae46e15-a52a-445b-8d48-ad5ba5415ed0" containerName="collect-profiles" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.562384 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="ovnkube-controller" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.562395 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="sbdb" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.562406 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="ovnkube-controller" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.562415 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="ovnkube-controller" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.562427 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="kube-rbac-proxy-ovn-metrics" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 
00:16:47.562438 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="ovnkube-controller" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.562450 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="kube-rbac-proxy-node" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.562467 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="ovn-acl-logging" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.562479 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="nbdb" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.562493 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="ovn-controller" Jan 22 00:16:47 crc kubenswrapper[4829]: E0122 00:16:47.562634 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="ovnkube-controller" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.562644 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerName="ovnkube-controller" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.565003 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.606759 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-var-lib-openvswitch\") pod \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.606836 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-env-overrides\") pod \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.606887 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-ovnkube-config\") pod \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.606931 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-systemd-units\") pod \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.606969 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-ovnkube-script-lib\") pod \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.607011 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-run-ovn\") pod \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.607052 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-run-netns\") pod \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.607087 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-run-systemd\") pod \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.607120 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-ovn-node-metrics-cert\") pod \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.607150 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z7zv6\" (UniqueName: \"kubernetes.io/projected/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-kube-api-access-z7zv6\") pod \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.607187 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-run-openvswitch\") pod \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.607223 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-cni-netd\") pod \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.607254 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-slash\") pod \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.607281 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-run-ovn-kubernetes\") pod \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.607311 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-etc-openvswitch\") pod \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.607365 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" 
(UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-kubelet\") pod \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.607407 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-log-socket\") pod \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.607452 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-cni-bin\") pod \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.607505 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-node-log\") pod \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.607565 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-var-lib-cni-networks-ovn-kubernetes\") pod \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\" (UID: \"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340\") " Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.607897 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" (UID: "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.607953 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" (UID: "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.607944 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" (UID: "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.607987 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" (UID: "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.608020 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-slash" (OuterVolumeSpecName: "host-slash") pod "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" (UID: "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.608021 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" (UID: "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.608044 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" (UID: "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.608063 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" (UID: "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.608096 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" (UID: "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.608131 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" (UID: "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.608162 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-log-socket" (OuterVolumeSpecName: "log-socket") pod "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" (UID: "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.608193 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" (UID: "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340"). InnerVolumeSpecName "host-cni-bin". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.608224 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-node-log" (OuterVolumeSpecName: "node-log") pod "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" (UID: "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.608377 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" (UID: "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.608390 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" (UID: "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.608447 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" (UID: "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.608792 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" (UID: "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.614824 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-kube-api-access-z7zv6" (OuterVolumeSpecName: "kube-api-access-z7zv6") pod "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" (UID: "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340"). InnerVolumeSpecName "kube-api-access-z7zv6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.616364 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" (UID: "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.630422 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" (UID: "7df1ca93-0e8f-4f06-8b8f-2297a8dbb340"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.709272 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-run-systemd\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.709332 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-host-run-ovn-kubernetes\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.709376 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-run-ovn\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.709402 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.709427 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d3ab096b-e391-47c0-bb3e-885c1b52af13-ovnkube-config\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.709459 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-host-run-netns\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.709590 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d3ab096b-e391-47c0-bb3e-885c1b52af13-ovnkube-script-lib\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.709720 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d3ab096b-e391-47c0-bb3e-885c1b52af13-env-overrides\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.709760 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-run-openvswitch\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.709892 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d3ab096b-e391-47c0-bb3e-885c1b52af13-ovn-node-metrics-cert\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.709956 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-log-socket\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.710052 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-etc-openvswitch\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.710137 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-var-lib-openvswitch\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.710193 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-host-cni-netd\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.710293 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-node-log\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.710336 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6h2hr\" (UniqueName: \"kubernetes.io/projected/d3ab096b-e391-47c0-bb3e-885c1b52af13-kube-api-access-6h2hr\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.710377 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-systemd-units\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.710421 4829 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-host-cni-bin\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.710494 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-host-slash\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.710602 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-host-kubelet\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.710710 4829 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-node-log\") on node \"crc\" DevicePath \"\"" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.710753 4829 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.710781 4829 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.710806 4829 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.710829 4829 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.710853 4829 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-systemd-units\") on node \"crc\" DevicePath \"\"" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.710876 4829 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.710899 4829 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.710915 4829 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-run-netns\") on node \"crc\" DevicePath \"\"" Jan 
22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.710931 4829 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-run-systemd\") on node \"crc\" DevicePath \"\"" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.710948 4829 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.710966 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z7zv6\" (UniqueName: \"kubernetes.io/projected/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-kube-api-access-z7zv6\") on node \"crc\" DevicePath \"\"" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.710984 4829 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-run-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.711000 4829 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-cni-netd\") on node \"crc\" DevicePath \"\"" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.711017 4829 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-slash\") on node \"crc\" DevicePath \"\"" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.711034 4829 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.711051 4829 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.711068 4829 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-kubelet\") on node \"crc\" DevicePath \"\"" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.711083 4829 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-log-socket\") on node \"crc\" DevicePath \"\"" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.711099 4829 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340-host-cni-bin\") on node \"crc\" DevicePath \"\"" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.812099 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-host-kubelet\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.812493 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: 
\"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-run-systemd\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.812704 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-host-run-ovn-kubernetes\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.812946 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.813087 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.812789 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-host-run-ovn-kubernetes\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.812631 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-run-systemd\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.813112 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-run-ovn\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.812279 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-host-kubelet\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.813245 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d3ab096b-e391-47c0-bb3e-885c1b52af13-ovnkube-config\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.813340 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/d3ab096b-e391-47c0-bb3e-885c1b52af13-ovnkube-script-lib\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.813384 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-host-run-netns\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.813439 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d3ab096b-e391-47c0-bb3e-885c1b52af13-env-overrides\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.813485 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-run-openvswitch\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.813566 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-host-run-netns\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.813732 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-run-openvswitch\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.813588 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d3ab096b-e391-47c0-bb3e-885c1b52af13-ovn-node-metrics-cert\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.813787 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-run-ovn\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.813864 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-log-socket\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.813838 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-log-socket\") pod \"ovnkube-node-xp7m4\" (UID: 
\"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.813925 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-etc-openvswitch\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.814002 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-host-cni-netd\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.814051 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-var-lib-openvswitch\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.814104 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-etc-openvswitch\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.814113 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-host-cni-netd\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.814211 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-node-log\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.814248 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6h2hr\" (UniqueName: \"kubernetes.io/projected/d3ab096b-e391-47c0-bb3e-885c1b52af13-kube-api-access-6h2hr\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.814282 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-systemd-units\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.814275 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-node-log\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc 
kubenswrapper[4829]: I0122 00:16:47.814320 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-host-cni-bin\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.814358 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-host-cni-bin\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.814399 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-systemd-units\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.814190 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-var-lib-openvswitch\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.814383 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-host-slash\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.814456 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d3ab096b-e391-47c0-bb3e-885c1b52af13-host-slash\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.815037 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d3ab096b-e391-47c0-bb3e-885c1b52af13-env-overrides\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.815097 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d3ab096b-e391-47c0-bb3e-885c1b52af13-ovnkube-script-lib\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.815219 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d3ab096b-e391-47c0-bb3e-885c1b52af13-ovnkube-config\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.822126 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/d3ab096b-e391-47c0-bb3e-885c1b52af13-ovn-node-metrics-cert\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.839323 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6h2hr\" (UniqueName: \"kubernetes.io/projected/d3ab096b-e391-47c0-bb3e-885c1b52af13-kube-api-access-6h2hr\") pod \"ovnkube-node-xp7m4\" (UID: \"d3ab096b-e391-47c0-bb3e-885c1b52af13\") " pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.887301 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.992555 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4ss4n_60f879f6-8b21-4e75-9a62-d372fec048e1/kube-multus/2.log" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.993380 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4ss4n_60f879f6-8b21-4e75-9a62-d372fec048e1/kube-multus/1.log" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.993431 4829 generic.go:334] "Generic (PLEG): container finished" podID="60f879f6-8b21-4e75-9a62-d372fec048e1" containerID="12d5f08bdea530d824af56e3874c32cf12d50fe29bbc262f27f839089044880d" exitCode=2 Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.993494 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4ss4n" event={"ID":"60f879f6-8b21-4e75-9a62-d372fec048e1","Type":"ContainerDied","Data":"12d5f08bdea530d824af56e3874c32cf12d50fe29bbc262f27f839089044880d"} Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.993628 4829 scope.go:117] "RemoveContainer" containerID="83e98a4d32040f6ba77c2c30a1833ff63b851a4aac473f7367192aa2fdc68a4c" Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.995783 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" event={"ID":"d3ab096b-e391-47c0-bb3e-885c1b52af13","Type":"ContainerStarted","Data":"929f52a813c4fccb5038e9e34aee092a28c3bff067aaca897a179e7790b15a91"} Jan 22 00:16:47 crc kubenswrapper[4829]: I0122 00:16:47.997506 4829 scope.go:117] "RemoveContainer" containerID="12d5f08bdea530d824af56e3874c32cf12d50fe29bbc262f27f839089044880d" Jan 22 00:16:48 crc kubenswrapper[4829]: E0122 00:16:48.000744 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-4ss4n_openshift-multus(60f879f6-8b21-4e75-9a62-d372fec048e1)\"" pod="openshift-multus/multus-4ss4n" podUID="60f879f6-8b21-4e75-9a62-d372fec048e1" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.001721 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fd6j8_7df1ca93-0e8f-4f06-8b8f-2297a8dbb340/ovnkube-controller/3.log" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.005719 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fd6j8_7df1ca93-0e8f-4f06-8b8f-2297a8dbb340/ovn-acl-logging/0.log" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.006794 4829 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fd6j8_7df1ca93-0e8f-4f06-8b8f-2297a8dbb340/ovn-controller/0.log" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.007355 4829 generic.go:334] "Generic (PLEG): container finished" podID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerID="55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183" exitCode=0 Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.007423 4829 generic.go:334] "Generic (PLEG): container finished" podID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerID="1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d" exitCode=0 Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.007439 4829 generic.go:334] "Generic (PLEG): container finished" podID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerID="417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495" exitCode=0 Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.007456 4829 generic.go:334] "Generic (PLEG): container finished" podID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerID="03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c" exitCode=0 Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.007469 4829 generic.go:334] "Generic (PLEG): container finished" podID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerID="89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269" exitCode=0 Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.007482 4829 generic.go:334] "Generic (PLEG): container finished" podID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerID="f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615" exitCode=0 Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.007493 4829 generic.go:334] "Generic (PLEG): container finished" podID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerID="a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e" exitCode=143 Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.007505 4829 generic.go:334] "Generic (PLEG): container finished" podID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" containerID="b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990" exitCode=143 Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.007532 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" event={"ID":"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340","Type":"ContainerDied","Data":"55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.007583 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" event={"ID":"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340","Type":"ContainerDied","Data":"1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.007599 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" event={"ID":"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340","Type":"ContainerDied","Data":"417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.007612 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" event={"ID":"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340","Type":"ContainerDied","Data":"03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.007625 4829 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" event={"ID":"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340","Type":"ContainerDied","Data":"89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.007640 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" event={"ID":"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340","Type":"ContainerDied","Data":"f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.007654 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.007670 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.007692 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.007701 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.007709 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.007717 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.007620 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.007725 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008189 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008198 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008204 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008214 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" event={"ID":"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340","Type":"ContainerDied","Data":"a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008228 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008234 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008240 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008245 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008250 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008255 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008261 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008267 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008273 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008278 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008286 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" event={"ID":"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340","Type":"ContainerDied","Data":"b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008294 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008300 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008305 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008310 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008316 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008320 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008325 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008331 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008335 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008340 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008347 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fd6j8" event={"ID":"7df1ca93-0e8f-4f06-8b8f-2297a8dbb340","Type":"ContainerDied","Data":"711074ca5051de154268f10698a9c0a6ab129910a1ec19030c4c0dd967a58a80"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008355 4829 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008360 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008365 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008371 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008376 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008382 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008387 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008392 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008397 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.008402 4829 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579"} Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.052826 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-fd6j8"] Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.055963 4829 scope.go:117] "RemoveContainer" containerID="55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.057301 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-fd6j8"] Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.075885 4829 scope.go:117] "RemoveContainer" containerID="9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.093784 4829 scope.go:117] "RemoveContainer" containerID="1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.146577 4829 scope.go:117] "RemoveContainer" containerID="417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.158977 4829 
scope.go:117] "RemoveContainer" containerID="03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.278801 4829 scope.go:117] "RemoveContainer" containerID="89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.303827 4829 scope.go:117] "RemoveContainer" containerID="f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.340346 4829 scope.go:117] "RemoveContainer" containerID="a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.354360 4829 scope.go:117] "RemoveContainer" containerID="b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.369559 4829 scope.go:117] "RemoveContainer" containerID="b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.383007 4829 scope.go:117] "RemoveContainer" containerID="55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183" Jan 22 00:16:48 crc kubenswrapper[4829]: E0122 00:16:48.383837 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183\": container with ID starting with 55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183 not found: ID does not exist" containerID="55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.383877 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183"} err="failed to get container status \"55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183\": rpc error: code = NotFound desc = could not find container \"55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183\": container with ID starting with 55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.383917 4829 scope.go:117] "RemoveContainer" containerID="9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63" Jan 22 00:16:48 crc kubenswrapper[4829]: E0122 00:16:48.384242 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63\": container with ID starting with 9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63 not found: ID does not exist" containerID="9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.384296 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63"} err="failed to get container status \"9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63\": rpc error: code = NotFound desc = could not find container \"9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63\": container with ID starting with 9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.384338 4829 
scope.go:117] "RemoveContainer" containerID="1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d" Jan 22 00:16:48 crc kubenswrapper[4829]: E0122 00:16:48.384727 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\": container with ID starting with 1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d not found: ID does not exist" containerID="1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.384765 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d"} err="failed to get container status \"1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\": rpc error: code = NotFound desc = could not find container \"1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\": container with ID starting with 1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.384783 4829 scope.go:117] "RemoveContainer" containerID="417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495" Jan 22 00:16:48 crc kubenswrapper[4829]: E0122 00:16:48.385019 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\": container with ID starting with 417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495 not found: ID does not exist" containerID="417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.385046 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495"} err="failed to get container status \"417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\": rpc error: code = NotFound desc = could not find container \"417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\": container with ID starting with 417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.385063 4829 scope.go:117] "RemoveContainer" containerID="03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c" Jan 22 00:16:48 crc kubenswrapper[4829]: E0122 00:16:48.385403 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\": container with ID starting with 03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c not found: ID does not exist" containerID="03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.385430 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c"} err="failed to get container status \"03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\": rpc error: code = NotFound desc = could not find container \"03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\": container with ID starting with 
03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.385449 4829 scope.go:117] "RemoveContainer" containerID="89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269" Jan 22 00:16:48 crc kubenswrapper[4829]: E0122 00:16:48.385999 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\": container with ID starting with 89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269 not found: ID does not exist" containerID="89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.386053 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269"} err="failed to get container status \"89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\": rpc error: code = NotFound desc = could not find container \"89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\": container with ID starting with 89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.386084 4829 scope.go:117] "RemoveContainer" containerID="f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615" Jan 22 00:16:48 crc kubenswrapper[4829]: E0122 00:16:48.386452 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\": container with ID starting with f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615 not found: ID does not exist" containerID="f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.386485 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615"} err="failed to get container status \"f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\": rpc error: code = NotFound desc = could not find container \"f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\": container with ID starting with f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.386505 4829 scope.go:117] "RemoveContainer" containerID="a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e" Jan 22 00:16:48 crc kubenswrapper[4829]: E0122 00:16:48.386733 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\": container with ID starting with a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e not found: ID does not exist" containerID="a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.386761 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e"} err="failed to get container status \"a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\": rpc 
error: code = NotFound desc = could not find container \"a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\": container with ID starting with a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.386779 4829 scope.go:117] "RemoveContainer" containerID="b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990" Jan 22 00:16:48 crc kubenswrapper[4829]: E0122 00:16:48.387105 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\": container with ID starting with b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990 not found: ID does not exist" containerID="b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.387134 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990"} err="failed to get container status \"b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\": rpc error: code = NotFound desc = could not find container \"b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\": container with ID starting with b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.387152 4829 scope.go:117] "RemoveContainer" containerID="b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579" Jan 22 00:16:48 crc kubenswrapper[4829]: E0122 00:16:48.388483 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\": container with ID starting with b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579 not found: ID does not exist" containerID="b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.388506 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579"} err="failed to get container status \"b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\": rpc error: code = NotFound desc = could not find container \"b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\": container with ID starting with b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.388523 4829 scope.go:117] "RemoveContainer" containerID="55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.388911 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183"} err="failed to get container status \"55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183\": rpc error: code = NotFound desc = could not find container \"55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183\": container with ID starting with 55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 
00:16:48.388933 4829 scope.go:117] "RemoveContainer" containerID="9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.389609 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63"} err="failed to get container status \"9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63\": rpc error: code = NotFound desc = could not find container \"9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63\": container with ID starting with 9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.389638 4829 scope.go:117] "RemoveContainer" containerID="1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.389955 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d"} err="failed to get container status \"1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\": rpc error: code = NotFound desc = could not find container \"1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\": container with ID starting with 1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.389984 4829 scope.go:117] "RemoveContainer" containerID="417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.390746 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495"} err="failed to get container status \"417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\": rpc error: code = NotFound desc = could not find container \"417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\": container with ID starting with 417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.390780 4829 scope.go:117] "RemoveContainer" containerID="03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.391066 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c"} err="failed to get container status \"03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\": rpc error: code = NotFound desc = could not find container \"03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\": container with ID starting with 03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.391087 4829 scope.go:117] "RemoveContainer" containerID="89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.391315 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269"} err="failed to get container status 
\"89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\": rpc error: code = NotFound desc = could not find container \"89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\": container with ID starting with 89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.391332 4829 scope.go:117] "RemoveContainer" containerID="f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.392112 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615"} err="failed to get container status \"f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\": rpc error: code = NotFound desc = could not find container \"f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\": container with ID starting with f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.392138 4829 scope.go:117] "RemoveContainer" containerID="a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.392389 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e"} err="failed to get container status \"a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\": rpc error: code = NotFound desc = could not find container \"a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\": container with ID starting with a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.392417 4829 scope.go:117] "RemoveContainer" containerID="b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.392599 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990"} err="failed to get container status \"b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\": rpc error: code = NotFound desc = could not find container \"b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\": container with ID starting with b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.392621 4829 scope.go:117] "RemoveContainer" containerID="b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.392801 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579"} err="failed to get container status \"b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\": rpc error: code = NotFound desc = could not find container \"b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\": container with ID starting with b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.392821 4829 scope.go:117] "RemoveContainer" 
containerID="55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.393057 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183"} err="failed to get container status \"55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183\": rpc error: code = NotFound desc = could not find container \"55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183\": container with ID starting with 55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.393086 4829 scope.go:117] "RemoveContainer" containerID="9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.393322 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63"} err="failed to get container status \"9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63\": rpc error: code = NotFound desc = could not find container \"9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63\": container with ID starting with 9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.393345 4829 scope.go:117] "RemoveContainer" containerID="1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.393604 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d"} err="failed to get container status \"1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\": rpc error: code = NotFound desc = could not find container \"1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\": container with ID starting with 1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.393626 4829 scope.go:117] "RemoveContainer" containerID="417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.393887 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495"} err="failed to get container status \"417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\": rpc error: code = NotFound desc = could not find container \"417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\": container with ID starting with 417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.393922 4829 scope.go:117] "RemoveContainer" containerID="03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.394152 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c"} err="failed to get container status \"03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\": rpc error: code = NotFound desc = could not find 
container \"03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\": container with ID starting with 03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.394176 4829 scope.go:117] "RemoveContainer" containerID="89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.394404 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269"} err="failed to get container status \"89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\": rpc error: code = NotFound desc = could not find container \"89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\": container with ID starting with 89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.394428 4829 scope.go:117] "RemoveContainer" containerID="f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.394653 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615"} err="failed to get container status \"f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\": rpc error: code = NotFound desc = could not find container \"f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\": container with ID starting with f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.394674 4829 scope.go:117] "RemoveContainer" containerID="a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.395055 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e"} err="failed to get container status \"a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\": rpc error: code = NotFound desc = could not find container \"a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\": container with ID starting with a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.395080 4829 scope.go:117] "RemoveContainer" containerID="b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.395328 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990"} err="failed to get container status \"b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\": rpc error: code = NotFound desc = could not find container \"b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\": container with ID starting with b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.395348 4829 scope.go:117] "RemoveContainer" containerID="b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.395654 4829 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579"} err="failed to get container status \"b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\": rpc error: code = NotFound desc = could not find container \"b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\": container with ID starting with b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.395672 4829 scope.go:117] "RemoveContainer" containerID="55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.396148 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183"} err="failed to get container status \"55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183\": rpc error: code = NotFound desc = could not find container \"55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183\": container with ID starting with 55e326aac44b69c6601993dd9b76d7effbfe645458908b67bc23e3827c8c1183 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.396178 4829 scope.go:117] "RemoveContainer" containerID="9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.396488 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63"} err="failed to get container status \"9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63\": rpc error: code = NotFound desc = could not find container \"9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63\": container with ID starting with 9db0e6ba514af8490d566485e8a6e3b0c5cf2e3303d8653681414e05200dea63 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.396523 4829 scope.go:117] "RemoveContainer" containerID="1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.396781 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d"} err="failed to get container status \"1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\": rpc error: code = NotFound desc = could not find container \"1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d\": container with ID starting with 1d429f1f381e50b4524c56a290da0a1bb12a3f7bc30b0fb9215c98796bd9162d not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.396809 4829 scope.go:117] "RemoveContainer" containerID="417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.397141 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495"} err="failed to get container status \"417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\": rpc error: code = NotFound desc = could not find container \"417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495\": container with ID starting with 
417657d375f2a8055e2234d6b2fb2a4ab6866fe32f7144243fc24b185ad88495 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.397168 4829 scope.go:117] "RemoveContainer" containerID="03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.397394 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c"} err="failed to get container status \"03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\": rpc error: code = NotFound desc = could not find container \"03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c\": container with ID starting with 03cc42d2ebcd59a95c5c3768605bf21d0f801b48dd9c5780effbea03bfaadd4c not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.397420 4829 scope.go:117] "RemoveContainer" containerID="89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.397641 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269"} err="failed to get container status \"89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\": rpc error: code = NotFound desc = could not find container \"89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269\": container with ID starting with 89042a2479f428d3869a0754e0405f50e896e7d292ea12cd9fbadde2b545d269 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.397700 4829 scope.go:117] "RemoveContainer" containerID="f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.397953 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615"} err="failed to get container status \"f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\": rpc error: code = NotFound desc = could not find container \"f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615\": container with ID starting with f96f18af1171f9590927382fb033dff53f1f576796b8aaa47e66b11c94fa5615 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.397977 4829 scope.go:117] "RemoveContainer" containerID="a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.398180 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e"} err="failed to get container status \"a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\": rpc error: code = NotFound desc = could not find container \"a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e\": container with ID starting with a8c4ab31b90d8e555aecb2ea0a0b1196020f57255d58be36a4e297d24d36056e not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.398202 4829 scope.go:117] "RemoveContainer" containerID="b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.398456 4829 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990"} err="failed to get container status \"b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\": rpc error: code = NotFound desc = could not find container \"b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990\": container with ID starting with b35f979e3fab60012a48bf7620a6acee6ba0169d747d2ff3c439c37bbede6990 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.398484 4829 scope.go:117] "RemoveContainer" containerID="b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.398762 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579"} err="failed to get container status \"b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\": rpc error: code = NotFound desc = could not find container \"b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579\": container with ID starting with b74805f75a44cb08e9f0379b285802f2a334cd1e618cf16863695d3a82c51579 not found: ID does not exist" Jan 22 00:16:48 crc kubenswrapper[4829]: I0122 00:16:48.566348 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7df1ca93-0e8f-4f06-8b8f-2297a8dbb340" path="/var/lib/kubelet/pods/7df1ca93-0e8f-4f06-8b8f-2297a8dbb340/volumes" Jan 22 00:16:49 crc kubenswrapper[4829]: I0122 00:16:49.022398 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4ss4n_60f879f6-8b21-4e75-9a62-d372fec048e1/kube-multus/2.log" Jan 22 00:16:49 crc kubenswrapper[4829]: I0122 00:16:49.026106 4829 generic.go:334] "Generic (PLEG): container finished" podID="d3ab096b-e391-47c0-bb3e-885c1b52af13" containerID="09cfac111eb74bfdf13a1f351be141169354f58a980b8f0e266dff093f404e71" exitCode=0 Jan 22 00:16:49 crc kubenswrapper[4829]: I0122 00:16:49.026214 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" event={"ID":"d3ab096b-e391-47c0-bb3e-885c1b52af13","Type":"ContainerDied","Data":"09cfac111eb74bfdf13a1f351be141169354f58a980b8f0e266dff093f404e71"} Jan 22 00:16:50 crc kubenswrapper[4829]: I0122 00:16:50.041038 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" event={"ID":"d3ab096b-e391-47c0-bb3e-885c1b52af13","Type":"ContainerStarted","Data":"f8be5efac0028b0ca4eee0467d5b965d722057d774763012ee1c74b0f13d261c"} Jan 22 00:16:50 crc kubenswrapper[4829]: I0122 00:16:50.041311 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" event={"ID":"d3ab096b-e391-47c0-bb3e-885c1b52af13","Type":"ContainerStarted","Data":"8472eec98ee0ccc5ed4c542ffa6d10ead2aa59a1e4953b2487a4bff02898c6f1"} Jan 22 00:16:50 crc kubenswrapper[4829]: I0122 00:16:50.041325 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" event={"ID":"d3ab096b-e391-47c0-bb3e-885c1b52af13","Type":"ContainerStarted","Data":"09fe86bdb62a1c0f315c56e71771262b35990ef44aab2c8745a6b75c7a625db1"} Jan 22 00:16:50 crc kubenswrapper[4829]: I0122 00:16:50.041338 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" event={"ID":"d3ab096b-e391-47c0-bb3e-885c1b52af13","Type":"ContainerStarted","Data":"fbadf2c5366e74f4b05ac3fd1fe2fdd2eedf36b21d55f3bd7dd1fb1b5998622a"} Jan 
22 00:16:50 crc kubenswrapper[4829]: I0122 00:16:50.041352 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" event={"ID":"d3ab096b-e391-47c0-bb3e-885c1b52af13","Type":"ContainerStarted","Data":"57df34b37e65fe7cfd5e1ef5896027529c08f7f49db37548efb18d27f4affe57"} Jan 22 00:16:50 crc kubenswrapper[4829]: I0122 00:16:50.041363 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" event={"ID":"d3ab096b-e391-47c0-bb3e-885c1b52af13","Type":"ContainerStarted","Data":"f8d4247ed77ceb2e81476768df9fc5ab5f090f657d03214978cccc1162b1d4a9"} Jan 22 00:16:53 crc kubenswrapper[4829]: I0122 00:16:53.072859 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" event={"ID":"d3ab096b-e391-47c0-bb3e-885c1b52af13","Type":"ContainerStarted","Data":"bd393125edf1ac7410b6c6ba29d99463a5d05a9324b12ed69e8508903b92adfb"} Jan 22 00:16:55 crc kubenswrapper[4829]: I0122 00:16:55.087532 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" event={"ID":"d3ab096b-e391-47c0-bb3e-885c1b52af13","Type":"ContainerStarted","Data":"b4e9ebecfc9ec7bed5f9fb1f75e96fc9ce4a613e3ff84306f125aa9fe6b08ae6"} Jan 22 00:16:55 crc kubenswrapper[4829]: I0122 00:16:55.088020 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:55 crc kubenswrapper[4829]: I0122 00:16:55.088031 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:55 crc kubenswrapper[4829]: I0122 00:16:55.088040 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:55 crc kubenswrapper[4829]: I0122 00:16:55.113783 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:55 crc kubenswrapper[4829]: I0122 00:16:55.120291 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:16:55 crc kubenswrapper[4829]: I0122 00:16:55.135971 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" podStartSLOduration=8.135948365 podStartE2EDuration="8.135948365s" podCreationTimestamp="2026-01-22 00:16:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:16:55.128275517 +0000 UTC m=+593.164517449" watchObservedRunningTime="2026-01-22 00:16:55.135948365 +0000 UTC m=+593.172190297" Jan 22 00:17:00 crc kubenswrapper[4829]: I0122 00:17:00.553457 4829 scope.go:117] "RemoveContainer" containerID="12d5f08bdea530d824af56e3874c32cf12d50fe29bbc262f27f839089044880d" Jan 22 00:17:00 crc kubenswrapper[4829]: E0122 00:17:00.554573 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-4ss4n_openshift-multus(60f879f6-8b21-4e75-9a62-d372fec048e1)\"" pod="openshift-multus/multus-4ss4n" podUID="60f879f6-8b21-4e75-9a62-d372fec048e1" Jan 22 00:17:04 crc kubenswrapper[4829]: I0122 00:17:04.658395 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:17:04 crc kubenswrapper[4829]: I0122 00:17:04.658868 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:17:04 crc kubenswrapper[4829]: I0122 00:17:04.658943 4829 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 00:17:04 crc kubenswrapper[4829]: I0122 00:17:04.659822 4829 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cb30e41ea57ec072bdbdc7d1adc9b52feca9581b84ae5cf109ff0d3fe8f78fec"} pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 00:17:04 crc kubenswrapper[4829]: I0122 00:17:04.659920 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" containerID="cri-o://cb30e41ea57ec072bdbdc7d1adc9b52feca9581b84ae5cf109ff0d3fe8f78fec" gracePeriod=600 Jan 22 00:17:05 crc kubenswrapper[4829]: I0122 00:17:05.155822 4829 generic.go:334] "Generic (PLEG): container finished" podID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerID="cb30e41ea57ec072bdbdc7d1adc9b52feca9581b84ae5cf109ff0d3fe8f78fec" exitCode=0 Jan 22 00:17:05 crc kubenswrapper[4829]: I0122 00:17:05.155892 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerDied","Data":"cb30e41ea57ec072bdbdc7d1adc9b52feca9581b84ae5cf109ff0d3fe8f78fec"} Jan 22 00:17:05 crc kubenswrapper[4829]: I0122 00:17:05.156460 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerStarted","Data":"1a6767ee33d3f8536c661de53fec59c45e35b5827099605c67df4196857f9939"} Jan 22 00:17:05 crc kubenswrapper[4829]: I0122 00:17:05.156502 4829 scope.go:117] "RemoveContainer" containerID="7d8aed39761e236f4d6bdcace20b41c7a53f6e11cebd6db8c3d28637f0ea9c9c" Jan 22 00:17:13 crc kubenswrapper[4829]: I0122 00:17:13.553589 4829 scope.go:117] "RemoveContainer" containerID="12d5f08bdea530d824af56e3874c32cf12d50fe29bbc262f27f839089044880d" Jan 22 00:17:14 crc kubenswrapper[4829]: I0122 00:17:14.221216 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4ss4n_60f879f6-8b21-4e75-9a62-d372fec048e1/kube-multus/2.log" Jan 22 00:17:14 crc kubenswrapper[4829]: I0122 00:17:14.221712 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4ss4n" event={"ID":"60f879f6-8b21-4e75-9a62-d372fec048e1","Type":"ContainerStarted","Data":"6eba75f2beae7a79e823a51b3396729ede271a89cd4f6e9174b4ca438f94a8d2"} Jan 22 00:17:17 crc kubenswrapper[4829]: I0122 00:17:17.924422 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-xp7m4" Jan 22 00:17:54 crc kubenswrapper[4829]: I0122 00:17:54.940927 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ngtfk"] Jan 22 00:17:54 crc kubenswrapper[4829]: I0122 00:17:54.941829 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ngtfk" podUID="4ef73aa9-86c5-452b-af65-e8b14090b016" containerName="registry-server" containerID="cri-o://a283e2259446191675a9079b57245ccdac2a2ba317813206f81d3954e94ccf5f" gracePeriod=30 Jan 22 00:17:55 crc kubenswrapper[4829]: I0122 00:17:55.264309 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ngtfk" Jan 22 00:17:55 crc kubenswrapper[4829]: I0122 00:17:55.433871 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ef73aa9-86c5-452b-af65-e8b14090b016-catalog-content\") pod \"4ef73aa9-86c5-452b-af65-e8b14090b016\" (UID: \"4ef73aa9-86c5-452b-af65-e8b14090b016\") " Jan 22 00:17:55 crc kubenswrapper[4829]: I0122 00:17:55.433975 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ef73aa9-86c5-452b-af65-e8b14090b016-utilities\") pod \"4ef73aa9-86c5-452b-af65-e8b14090b016\" (UID: \"4ef73aa9-86c5-452b-af65-e8b14090b016\") " Jan 22 00:17:55 crc kubenswrapper[4829]: I0122 00:17:55.434159 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb9ll\" (UniqueName: \"kubernetes.io/projected/4ef73aa9-86c5-452b-af65-e8b14090b016-kube-api-access-sb9ll\") pod \"4ef73aa9-86c5-452b-af65-e8b14090b016\" (UID: \"4ef73aa9-86c5-452b-af65-e8b14090b016\") " Jan 22 00:17:55 crc kubenswrapper[4829]: I0122 00:17:55.435173 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ef73aa9-86c5-452b-af65-e8b14090b016-utilities" (OuterVolumeSpecName: "utilities") pod "4ef73aa9-86c5-452b-af65-e8b14090b016" (UID: "4ef73aa9-86c5-452b-af65-e8b14090b016"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:17:55 crc kubenswrapper[4829]: I0122 00:17:55.442711 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ef73aa9-86c5-452b-af65-e8b14090b016-kube-api-access-sb9ll" (OuterVolumeSpecName: "kube-api-access-sb9ll") pod "4ef73aa9-86c5-452b-af65-e8b14090b016" (UID: "4ef73aa9-86c5-452b-af65-e8b14090b016"). InnerVolumeSpecName "kube-api-access-sb9ll". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:17:55 crc kubenswrapper[4829]: I0122 00:17:55.465685 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ef73aa9-86c5-452b-af65-e8b14090b016-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4ef73aa9-86c5-452b-af65-e8b14090b016" (UID: "4ef73aa9-86c5-452b-af65-e8b14090b016"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:17:55 crc kubenswrapper[4829]: I0122 00:17:55.530756 4829 generic.go:334] "Generic (PLEG): container finished" podID="4ef73aa9-86c5-452b-af65-e8b14090b016" containerID="a283e2259446191675a9079b57245ccdac2a2ba317813206f81d3954e94ccf5f" exitCode=0 Jan 22 00:17:55 crc kubenswrapper[4829]: I0122 00:17:55.530983 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ngtfk" event={"ID":"4ef73aa9-86c5-452b-af65-e8b14090b016","Type":"ContainerDied","Data":"a283e2259446191675a9079b57245ccdac2a2ba317813206f81d3954e94ccf5f"} Jan 22 00:17:55 crc kubenswrapper[4829]: I0122 00:17:55.531055 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ngtfk" Jan 22 00:17:55 crc kubenswrapper[4829]: I0122 00:17:55.531093 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ngtfk" event={"ID":"4ef73aa9-86c5-452b-af65-e8b14090b016","Type":"ContainerDied","Data":"3959a1cb38de364bb56d5f4d919b77013df891fb2bb4d8dad2cb15fcdf339f15"} Jan 22 00:17:55 crc kubenswrapper[4829]: I0122 00:17:55.531121 4829 scope.go:117] "RemoveContainer" containerID="a283e2259446191675a9079b57245ccdac2a2ba317813206f81d3954e94ccf5f" Jan 22 00:17:55 crc kubenswrapper[4829]: I0122 00:17:55.538802 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb9ll\" (UniqueName: \"kubernetes.io/projected/4ef73aa9-86c5-452b-af65-e8b14090b016-kube-api-access-sb9ll\") on node \"crc\" DevicePath \"\"" Jan 22 00:17:55 crc kubenswrapper[4829]: I0122 00:17:55.538849 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ef73aa9-86c5-452b-af65-e8b14090b016-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 00:17:55 crc kubenswrapper[4829]: I0122 00:17:55.538870 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ef73aa9-86c5-452b-af65-e8b14090b016-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 00:17:55 crc kubenswrapper[4829]: I0122 00:17:55.569553 4829 scope.go:117] "RemoveContainer" containerID="5b84a27e68af623380f616ed816b77bcca1f35dcd76cc7bce4d042eb77b3f9af" Jan 22 00:17:55 crc kubenswrapper[4829]: I0122 00:17:55.569791 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ngtfk"] Jan 22 00:17:55 crc kubenswrapper[4829]: I0122 00:17:55.573833 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ngtfk"] Jan 22 00:17:55 crc kubenswrapper[4829]: I0122 00:17:55.600024 4829 scope.go:117] "RemoveContainer" containerID="7ac75d528a25ad188768f98e7dac3bf07e4793154fbb71e0e1c484b04d01ca22" Jan 22 00:17:55 crc kubenswrapper[4829]: I0122 00:17:55.619519 4829 scope.go:117] "RemoveContainer" containerID="a283e2259446191675a9079b57245ccdac2a2ba317813206f81d3954e94ccf5f" Jan 22 00:17:55 crc kubenswrapper[4829]: E0122 00:17:55.619933 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a283e2259446191675a9079b57245ccdac2a2ba317813206f81d3954e94ccf5f\": container with ID starting with a283e2259446191675a9079b57245ccdac2a2ba317813206f81d3954e94ccf5f not found: ID does not exist" containerID="a283e2259446191675a9079b57245ccdac2a2ba317813206f81d3954e94ccf5f" Jan 22 00:17:55 crc kubenswrapper[4829]: I0122 00:17:55.619960 4829 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a283e2259446191675a9079b57245ccdac2a2ba317813206f81d3954e94ccf5f"} err="failed to get container status \"a283e2259446191675a9079b57245ccdac2a2ba317813206f81d3954e94ccf5f\": rpc error: code = NotFound desc = could not find container \"a283e2259446191675a9079b57245ccdac2a2ba317813206f81d3954e94ccf5f\": container with ID starting with a283e2259446191675a9079b57245ccdac2a2ba317813206f81d3954e94ccf5f not found: ID does not exist" Jan 22 00:17:55 crc kubenswrapper[4829]: I0122 00:17:55.619982 4829 scope.go:117] "RemoveContainer" containerID="5b84a27e68af623380f616ed816b77bcca1f35dcd76cc7bce4d042eb77b3f9af" Jan 22 00:17:55 crc kubenswrapper[4829]: E0122 00:17:55.620360 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b84a27e68af623380f616ed816b77bcca1f35dcd76cc7bce4d042eb77b3f9af\": container with ID starting with 5b84a27e68af623380f616ed816b77bcca1f35dcd76cc7bce4d042eb77b3f9af not found: ID does not exist" containerID="5b84a27e68af623380f616ed816b77bcca1f35dcd76cc7bce4d042eb77b3f9af" Jan 22 00:17:55 crc kubenswrapper[4829]: I0122 00:17:55.620416 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b84a27e68af623380f616ed816b77bcca1f35dcd76cc7bce4d042eb77b3f9af"} err="failed to get container status \"5b84a27e68af623380f616ed816b77bcca1f35dcd76cc7bce4d042eb77b3f9af\": rpc error: code = NotFound desc = could not find container \"5b84a27e68af623380f616ed816b77bcca1f35dcd76cc7bce4d042eb77b3f9af\": container with ID starting with 5b84a27e68af623380f616ed816b77bcca1f35dcd76cc7bce4d042eb77b3f9af not found: ID does not exist" Jan 22 00:17:55 crc kubenswrapper[4829]: I0122 00:17:55.620491 4829 scope.go:117] "RemoveContainer" containerID="7ac75d528a25ad188768f98e7dac3bf07e4793154fbb71e0e1c484b04d01ca22" Jan 22 00:17:55 crc kubenswrapper[4829]: E0122 00:17:55.620956 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ac75d528a25ad188768f98e7dac3bf07e4793154fbb71e0e1c484b04d01ca22\": container with ID starting with 7ac75d528a25ad188768f98e7dac3bf07e4793154fbb71e0e1c484b04d01ca22 not found: ID does not exist" containerID="7ac75d528a25ad188768f98e7dac3bf07e4793154fbb71e0e1c484b04d01ca22" Jan 22 00:17:55 crc kubenswrapper[4829]: I0122 00:17:55.620985 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ac75d528a25ad188768f98e7dac3bf07e4793154fbb71e0e1c484b04d01ca22"} err="failed to get container status \"7ac75d528a25ad188768f98e7dac3bf07e4793154fbb71e0e1c484b04d01ca22\": rpc error: code = NotFound desc = could not find container \"7ac75d528a25ad188768f98e7dac3bf07e4793154fbb71e0e1c484b04d01ca22\": container with ID starting with 7ac75d528a25ad188768f98e7dac3bf07e4793154fbb71e0e1c484b04d01ca22 not found: ID does not exist" Jan 22 00:17:56 crc kubenswrapper[4829]: I0122 00:17:56.562210 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ef73aa9-86c5-452b-af65-e8b14090b016" path="/var/lib/kubelet/pods/4ef73aa9-86c5-452b-af65-e8b14090b016/volumes" Jan 22 00:17:58 crc kubenswrapper[4829]: I0122 00:17:58.581443 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt"] Jan 22 00:17:58 crc kubenswrapper[4829]: E0122 00:17:58.582073 4829 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="4ef73aa9-86c5-452b-af65-e8b14090b016" containerName="extract-utilities" Jan 22 00:17:58 crc kubenswrapper[4829]: I0122 00:17:58.582094 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ef73aa9-86c5-452b-af65-e8b14090b016" containerName="extract-utilities" Jan 22 00:17:58 crc kubenswrapper[4829]: E0122 00:17:58.582124 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ef73aa9-86c5-452b-af65-e8b14090b016" containerName="registry-server" Jan 22 00:17:58 crc kubenswrapper[4829]: I0122 00:17:58.582137 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ef73aa9-86c5-452b-af65-e8b14090b016" containerName="registry-server" Jan 22 00:17:58 crc kubenswrapper[4829]: E0122 00:17:58.582154 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ef73aa9-86c5-452b-af65-e8b14090b016" containerName="extract-content" Jan 22 00:17:58 crc kubenswrapper[4829]: I0122 00:17:58.582167 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ef73aa9-86c5-452b-af65-e8b14090b016" containerName="extract-content" Jan 22 00:17:58 crc kubenswrapper[4829]: I0122 00:17:58.582330 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ef73aa9-86c5-452b-af65-e8b14090b016" containerName="registry-server" Jan 22 00:17:58 crc kubenswrapper[4829]: I0122 00:17:58.583639 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt" Jan 22 00:17:58 crc kubenswrapper[4829]: I0122 00:17:58.586627 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 22 00:17:58 crc kubenswrapper[4829]: I0122 00:17:58.604739 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt"] Jan 22 00:17:58 crc kubenswrapper[4829]: I0122 00:17:58.784910 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8a8c9ff7-662e-475e-8571-12d6572c3cd1-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt\" (UID: \"8a8c9ff7-662e-475e-8571-12d6572c3cd1\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt" Jan 22 00:17:58 crc kubenswrapper[4829]: I0122 00:17:58.785049 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8a8c9ff7-662e-475e-8571-12d6572c3cd1-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt\" (UID: \"8a8c9ff7-662e-475e-8571-12d6572c3cd1\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt" Jan 22 00:17:58 crc kubenswrapper[4829]: I0122 00:17:58.785103 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skwt9\" (UniqueName: \"kubernetes.io/projected/8a8c9ff7-662e-475e-8571-12d6572c3cd1-kube-api-access-skwt9\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt\" (UID: \"8a8c9ff7-662e-475e-8571-12d6572c3cd1\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt" Jan 22 00:17:58 crc kubenswrapper[4829]: I0122 00:17:58.887298 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/8a8c9ff7-662e-475e-8571-12d6572c3cd1-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt\" (UID: \"8a8c9ff7-662e-475e-8571-12d6572c3cd1\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt" Jan 22 00:17:58 crc kubenswrapper[4829]: I0122 00:17:58.887415 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8a8c9ff7-662e-475e-8571-12d6572c3cd1-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt\" (UID: \"8a8c9ff7-662e-475e-8571-12d6572c3cd1\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt" Jan 22 00:17:58 crc kubenswrapper[4829]: I0122 00:17:58.887454 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skwt9\" (UniqueName: \"kubernetes.io/projected/8a8c9ff7-662e-475e-8571-12d6572c3cd1-kube-api-access-skwt9\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt\" (UID: \"8a8c9ff7-662e-475e-8571-12d6572c3cd1\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt" Jan 22 00:17:58 crc kubenswrapper[4829]: I0122 00:17:58.887806 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8a8c9ff7-662e-475e-8571-12d6572c3cd1-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt\" (UID: \"8a8c9ff7-662e-475e-8571-12d6572c3cd1\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt" Jan 22 00:17:58 crc kubenswrapper[4829]: I0122 00:17:58.887877 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8a8c9ff7-662e-475e-8571-12d6572c3cd1-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt\" (UID: \"8a8c9ff7-662e-475e-8571-12d6572c3cd1\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt" Jan 22 00:17:58 crc kubenswrapper[4829]: I0122 00:17:58.907316 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skwt9\" (UniqueName: \"kubernetes.io/projected/8a8c9ff7-662e-475e-8571-12d6572c3cd1-kube-api-access-skwt9\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt\" (UID: \"8a8c9ff7-662e-475e-8571-12d6572c3cd1\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt" Jan 22 00:17:58 crc kubenswrapper[4829]: I0122 00:17:58.947698 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt" Jan 22 00:17:59 crc kubenswrapper[4829]: I0122 00:17:59.216030 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt"] Jan 22 00:17:59 crc kubenswrapper[4829]: I0122 00:17:59.558890 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt" event={"ID":"8a8c9ff7-662e-475e-8571-12d6572c3cd1","Type":"ContainerStarted","Data":"0c70e1f13df10813e0d6475e025b34d40261ba7ba7ad24b912ced686846a421a"} Jan 22 00:17:59 crc kubenswrapper[4829]: I0122 00:17:59.558965 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt" event={"ID":"8a8c9ff7-662e-475e-8571-12d6572c3cd1","Type":"ContainerStarted","Data":"9b89bcbde352c6947d3f71df0403ad340b65cb7791faaa71277f055b6bc14c68"} Jan 22 00:18:00 crc kubenswrapper[4829]: I0122 00:18:00.575612 4829 generic.go:334] "Generic (PLEG): container finished" podID="8a8c9ff7-662e-475e-8571-12d6572c3cd1" containerID="0c70e1f13df10813e0d6475e025b34d40261ba7ba7ad24b912ced686846a421a" exitCode=0 Jan 22 00:18:00 crc kubenswrapper[4829]: I0122 00:18:00.575695 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt" event={"ID":"8a8c9ff7-662e-475e-8571-12d6572c3cd1","Type":"ContainerDied","Data":"0c70e1f13df10813e0d6475e025b34d40261ba7ba7ad24b912ced686846a421a"} Jan 22 00:18:00 crc kubenswrapper[4829]: I0122 00:18:00.578656 4829 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 00:18:02 crc kubenswrapper[4829]: I0122 00:18:02.591141 4829 generic.go:334] "Generic (PLEG): container finished" podID="8a8c9ff7-662e-475e-8571-12d6572c3cd1" containerID="39d7d52894a872f9fe00e11d3c8f7d1b6e4421e1efeddc1749c806ada979a14f" exitCode=0 Jan 22 00:18:02 crc kubenswrapper[4829]: I0122 00:18:02.591172 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt" event={"ID":"8a8c9ff7-662e-475e-8571-12d6572c3cd1","Type":"ContainerDied","Data":"39d7d52894a872f9fe00e11d3c8f7d1b6e4421e1efeddc1749c806ada979a14f"} Jan 22 00:18:03 crc kubenswrapper[4829]: I0122 00:18:03.604915 4829 generic.go:334] "Generic (PLEG): container finished" podID="8a8c9ff7-662e-475e-8571-12d6572c3cd1" containerID="6b7abfb600213d57199133c3b90f77d1bc8c413ec928efb88b54861a9e02671a" exitCode=0 Jan 22 00:18:03 crc kubenswrapper[4829]: I0122 00:18:03.605022 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt" event={"ID":"8a8c9ff7-662e-475e-8571-12d6572c3cd1","Type":"ContainerDied","Data":"6b7abfb600213d57199133c3b90f77d1bc8c413ec928efb88b54861a9e02671a"} Jan 22 00:18:04 crc kubenswrapper[4829]: I0122 00:18:04.932849 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt" Jan 22 00:18:04 crc kubenswrapper[4829]: I0122 00:18:04.977423 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-skwt9\" (UniqueName: \"kubernetes.io/projected/8a8c9ff7-662e-475e-8571-12d6572c3cd1-kube-api-access-skwt9\") pod \"8a8c9ff7-662e-475e-8571-12d6572c3cd1\" (UID: \"8a8c9ff7-662e-475e-8571-12d6572c3cd1\") " Jan 22 00:18:04 crc kubenswrapper[4829]: I0122 00:18:04.977606 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8a8c9ff7-662e-475e-8571-12d6572c3cd1-util\") pod \"8a8c9ff7-662e-475e-8571-12d6572c3cd1\" (UID: \"8a8c9ff7-662e-475e-8571-12d6572c3cd1\") " Jan 22 00:18:04 crc kubenswrapper[4829]: I0122 00:18:04.977710 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8a8c9ff7-662e-475e-8571-12d6572c3cd1-bundle\") pod \"8a8c9ff7-662e-475e-8571-12d6572c3cd1\" (UID: \"8a8c9ff7-662e-475e-8571-12d6572c3cd1\") " Jan 22 00:18:04 crc kubenswrapper[4829]: I0122 00:18:04.983139 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a8c9ff7-662e-475e-8571-12d6572c3cd1-kube-api-access-skwt9" (OuterVolumeSpecName: "kube-api-access-skwt9") pod "8a8c9ff7-662e-475e-8571-12d6572c3cd1" (UID: "8a8c9ff7-662e-475e-8571-12d6572c3cd1"). InnerVolumeSpecName "kube-api-access-skwt9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:18:04 crc kubenswrapper[4829]: I0122 00:18:04.992972 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a8c9ff7-662e-475e-8571-12d6572c3cd1-bundle" (OuterVolumeSpecName: "bundle") pod "8a8c9ff7-662e-475e-8571-12d6572c3cd1" (UID: "8a8c9ff7-662e-475e-8571-12d6572c3cd1"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:18:05 crc kubenswrapper[4829]: I0122 00:18:05.078655 4829 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8a8c9ff7-662e-475e-8571-12d6572c3cd1-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 00:18:05 crc kubenswrapper[4829]: I0122 00:18:05.078693 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-skwt9\" (UniqueName: \"kubernetes.io/projected/8a8c9ff7-662e-475e-8571-12d6572c3cd1-kube-api-access-skwt9\") on node \"crc\" DevicePath \"\"" Jan 22 00:18:05 crc kubenswrapper[4829]: I0122 00:18:05.286932 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a8c9ff7-662e-475e-8571-12d6572c3cd1-util" (OuterVolumeSpecName: "util") pod "8a8c9ff7-662e-475e-8571-12d6572c3cd1" (UID: "8a8c9ff7-662e-475e-8571-12d6572c3cd1"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:18:05 crc kubenswrapper[4829]: I0122 00:18:05.382921 4829 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8a8c9ff7-662e-475e-8571-12d6572c3cd1-util\") on node \"crc\" DevicePath \"\"" Jan 22 00:18:05 crc kubenswrapper[4829]: I0122 00:18:05.561860 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq"] Jan 22 00:18:05 crc kubenswrapper[4829]: E0122 00:18:05.562112 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a8c9ff7-662e-475e-8571-12d6572c3cd1" containerName="pull" Jan 22 00:18:05 crc kubenswrapper[4829]: I0122 00:18:05.562128 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a8c9ff7-662e-475e-8571-12d6572c3cd1" containerName="pull" Jan 22 00:18:05 crc kubenswrapper[4829]: E0122 00:18:05.562143 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a8c9ff7-662e-475e-8571-12d6572c3cd1" containerName="extract" Jan 22 00:18:05 crc kubenswrapper[4829]: I0122 00:18:05.562151 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a8c9ff7-662e-475e-8571-12d6572c3cd1" containerName="extract" Jan 22 00:18:05 crc kubenswrapper[4829]: E0122 00:18:05.562174 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a8c9ff7-662e-475e-8571-12d6572c3cd1" containerName="util" Jan 22 00:18:05 crc kubenswrapper[4829]: I0122 00:18:05.562182 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a8c9ff7-662e-475e-8571-12d6572c3cd1" containerName="util" Jan 22 00:18:05 crc kubenswrapper[4829]: I0122 00:18:05.562292 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a8c9ff7-662e-475e-8571-12d6572c3cd1" containerName="extract" Jan 22 00:18:05 crc kubenswrapper[4829]: I0122 00:18:05.563173 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq" Jan 22 00:18:05 crc kubenswrapper[4829]: I0122 00:18:05.579919 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq"] Jan 22 00:18:05 crc kubenswrapper[4829]: I0122 00:18:05.585326 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3b106cde-e55f-49c8-bf6a-c449b5c11b79-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq\" (UID: \"3b106cde-e55f-49c8-bf6a-c449b5c11b79\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq" Jan 22 00:18:05 crc kubenswrapper[4829]: I0122 00:18:05.585400 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcll2\" (UniqueName: \"kubernetes.io/projected/3b106cde-e55f-49c8-bf6a-c449b5c11b79-kube-api-access-wcll2\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq\" (UID: \"3b106cde-e55f-49c8-bf6a-c449b5c11b79\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq" Jan 22 00:18:05 crc kubenswrapper[4829]: I0122 00:18:05.585638 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3b106cde-e55f-49c8-bf6a-c449b5c11b79-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq\" (UID: \"3b106cde-e55f-49c8-bf6a-c449b5c11b79\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq" Jan 22 00:18:05 crc kubenswrapper[4829]: I0122 00:18:05.618579 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt" event={"ID":"8a8c9ff7-662e-475e-8571-12d6572c3cd1","Type":"ContainerDied","Data":"9b89bcbde352c6947d3f71df0403ad340b65cb7791faaa71277f055b6bc14c68"} Jan 22 00:18:05 crc kubenswrapper[4829]: I0122 00:18:05.618620 4829 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9b89bcbde352c6947d3f71df0403ad340b65cb7791faaa71277f055b6bc14c68" Jan 22 00:18:05 crc kubenswrapper[4829]: I0122 00:18:05.618733 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt" Jan 22 00:18:05 crc kubenswrapper[4829]: I0122 00:18:05.686866 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3b106cde-e55f-49c8-bf6a-c449b5c11b79-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq\" (UID: \"3b106cde-e55f-49c8-bf6a-c449b5c11b79\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq" Jan 22 00:18:05 crc kubenswrapper[4829]: I0122 00:18:05.686958 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcll2\" (UniqueName: \"kubernetes.io/projected/3b106cde-e55f-49c8-bf6a-c449b5c11b79-kube-api-access-wcll2\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq\" (UID: \"3b106cde-e55f-49c8-bf6a-c449b5c11b79\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq" Jan 22 00:18:05 crc kubenswrapper[4829]: I0122 00:18:05.687022 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3b106cde-e55f-49c8-bf6a-c449b5c11b79-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq\" (UID: \"3b106cde-e55f-49c8-bf6a-c449b5c11b79\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq" Jan 22 00:18:05 crc kubenswrapper[4829]: I0122 00:18:05.687526 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3b106cde-e55f-49c8-bf6a-c449b5c11b79-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq\" (UID: \"3b106cde-e55f-49c8-bf6a-c449b5c11b79\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq" Jan 22 00:18:05 crc kubenswrapper[4829]: I0122 00:18:05.687621 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3b106cde-e55f-49c8-bf6a-c449b5c11b79-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq\" (UID: \"3b106cde-e55f-49c8-bf6a-c449b5c11b79\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq" Jan 22 00:18:05 crc kubenswrapper[4829]: I0122 00:18:05.703903 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcll2\" (UniqueName: \"kubernetes.io/projected/3b106cde-e55f-49c8-bf6a-c449b5c11b79-kube-api-access-wcll2\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq\" (UID: \"3b106cde-e55f-49c8-bf6a-c449b5c11b79\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq" Jan 22 00:18:05 crc kubenswrapper[4829]: I0122 00:18:05.884953 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq" Jan 22 00:18:06 crc kubenswrapper[4829]: I0122 00:18:06.111837 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq"] Jan 22 00:18:06 crc kubenswrapper[4829]: W0122 00:18:06.115647 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b106cde_e55f_49c8_bf6a_c449b5c11b79.slice/crio-3b7596c9c13d4dfc15483d2c351ebfc2cdef30057c7fd2fa1c633625826d6bf8 WatchSource:0}: Error finding container 3b7596c9c13d4dfc15483d2c351ebfc2cdef30057c7fd2fa1c633625826d6bf8: Status 404 returned error can't find the container with id 3b7596c9c13d4dfc15483d2c351ebfc2cdef30057c7fd2fa1c633625826d6bf8 Jan 22 00:18:06 crc kubenswrapper[4829]: I0122 00:18:06.355726 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h"] Jan 22 00:18:06 crc kubenswrapper[4829]: I0122 00:18:06.357308 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h" Jan 22 00:18:06 crc kubenswrapper[4829]: I0122 00:18:06.364018 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h"] Jan 22 00:18:06 crc kubenswrapper[4829]: I0122 00:18:06.395964 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0bf87245-ac20-41c2-ad0e-4b08d5aea64e-util\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h\" (UID: \"0bf87245-ac20-41c2-ad0e-4b08d5aea64e\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h" Jan 22 00:18:06 crc kubenswrapper[4829]: I0122 00:18:06.396038 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52rcj\" (UniqueName: \"kubernetes.io/projected/0bf87245-ac20-41c2-ad0e-4b08d5aea64e-kube-api-access-52rcj\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h\" (UID: \"0bf87245-ac20-41c2-ad0e-4b08d5aea64e\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h" Jan 22 00:18:06 crc kubenswrapper[4829]: I0122 00:18:06.396111 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0bf87245-ac20-41c2-ad0e-4b08d5aea64e-bundle\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h\" (UID: \"0bf87245-ac20-41c2-ad0e-4b08d5aea64e\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h" Jan 22 00:18:06 crc kubenswrapper[4829]: I0122 00:18:06.497775 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0bf87245-ac20-41c2-ad0e-4b08d5aea64e-bundle\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h\" (UID: \"0bf87245-ac20-41c2-ad0e-4b08d5aea64e\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h" Jan 22 00:18:06 crc kubenswrapper[4829]: I0122 00:18:06.497854 4829 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0bf87245-ac20-41c2-ad0e-4b08d5aea64e-util\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h\" (UID: \"0bf87245-ac20-41c2-ad0e-4b08d5aea64e\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h" Jan 22 00:18:06 crc kubenswrapper[4829]: I0122 00:18:06.497885 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52rcj\" (UniqueName: \"kubernetes.io/projected/0bf87245-ac20-41c2-ad0e-4b08d5aea64e-kube-api-access-52rcj\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h\" (UID: \"0bf87245-ac20-41c2-ad0e-4b08d5aea64e\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h" Jan 22 00:18:06 crc kubenswrapper[4829]: I0122 00:18:06.498605 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0bf87245-ac20-41c2-ad0e-4b08d5aea64e-util\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h\" (UID: \"0bf87245-ac20-41c2-ad0e-4b08d5aea64e\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h" Jan 22 00:18:06 crc kubenswrapper[4829]: I0122 00:18:06.498636 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0bf87245-ac20-41c2-ad0e-4b08d5aea64e-bundle\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h\" (UID: \"0bf87245-ac20-41c2-ad0e-4b08d5aea64e\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h" Jan 22 00:18:06 crc kubenswrapper[4829]: I0122 00:18:06.515008 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52rcj\" (UniqueName: \"kubernetes.io/projected/0bf87245-ac20-41c2-ad0e-4b08d5aea64e-kube-api-access-52rcj\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h\" (UID: \"0bf87245-ac20-41c2-ad0e-4b08d5aea64e\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h" Jan 22 00:18:06 crc kubenswrapper[4829]: I0122 00:18:06.626474 4829 generic.go:334] "Generic (PLEG): container finished" podID="3b106cde-e55f-49c8-bf6a-c449b5c11b79" containerID="bd092f375965f312ad14a97d99880c6297327bf4269f92348f5033b7d31623ac" exitCode=0 Jan 22 00:18:06 crc kubenswrapper[4829]: I0122 00:18:06.626525 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq" event={"ID":"3b106cde-e55f-49c8-bf6a-c449b5c11b79","Type":"ContainerDied","Data":"bd092f375965f312ad14a97d99880c6297327bf4269f92348f5033b7d31623ac"} Jan 22 00:18:06 crc kubenswrapper[4829]: I0122 00:18:06.626591 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq" event={"ID":"3b106cde-e55f-49c8-bf6a-c449b5c11b79","Type":"ContainerStarted","Data":"3b7596c9c13d4dfc15483d2c351ebfc2cdef30057c7fd2fa1c633625826d6bf8"} Jan 22 00:18:06 crc kubenswrapper[4829]: I0122 00:18:06.675972 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h" Jan 22 00:18:06 crc kubenswrapper[4829]: I0122 00:18:06.892356 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h"] Jan 22 00:18:06 crc kubenswrapper[4829]: W0122 00:18:06.898954 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0bf87245_ac20_41c2_ad0e_4b08d5aea64e.slice/crio-22eaf59ec17428cf1aa0e96708a216003919997a321d6b7af4ad9adbb2b75a35 WatchSource:0}: Error finding container 22eaf59ec17428cf1aa0e96708a216003919997a321d6b7af4ad9adbb2b75a35: Status 404 returned error can't find the container with id 22eaf59ec17428cf1aa0e96708a216003919997a321d6b7af4ad9adbb2b75a35 Jan 22 00:18:07 crc kubenswrapper[4829]: I0122 00:18:07.635446 4829 generic.go:334] "Generic (PLEG): container finished" podID="0bf87245-ac20-41c2-ad0e-4b08d5aea64e" containerID="48bb013c94463b2b232f477a57eed6dd06ab5af057030d858525509c85a980e8" exitCode=0 Jan 22 00:18:07 crc kubenswrapper[4829]: I0122 00:18:07.635518 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h" event={"ID":"0bf87245-ac20-41c2-ad0e-4b08d5aea64e","Type":"ContainerDied","Data":"48bb013c94463b2b232f477a57eed6dd06ab5af057030d858525509c85a980e8"} Jan 22 00:18:07 crc kubenswrapper[4829]: I0122 00:18:07.635592 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h" event={"ID":"0bf87245-ac20-41c2-ad0e-4b08d5aea64e","Type":"ContainerStarted","Data":"22eaf59ec17428cf1aa0e96708a216003919997a321d6b7af4ad9adbb2b75a35"} Jan 22 00:18:08 crc kubenswrapper[4829]: I0122 00:18:08.647923 4829 generic.go:334] "Generic (PLEG): container finished" podID="3b106cde-e55f-49c8-bf6a-c449b5c11b79" containerID="6a82fcffdd232f5a6d33e3b8b560e8886b607d198d0ace452124cbf1cf8b4ecf" exitCode=0 Jan 22 00:18:08 crc kubenswrapper[4829]: I0122 00:18:08.647991 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq" event={"ID":"3b106cde-e55f-49c8-bf6a-c449b5c11b79","Type":"ContainerDied","Data":"6a82fcffdd232f5a6d33e3b8b560e8886b607d198d0ace452124cbf1cf8b4ecf"} Jan 22 00:18:08 crc kubenswrapper[4829]: I0122 00:18:08.658328 4829 generic.go:334] "Generic (PLEG): container finished" podID="0bf87245-ac20-41c2-ad0e-4b08d5aea64e" containerID="961ee05dc1a69ab3dd4d24a5af923259204578d961e5ad55f78036beb2a3726b" exitCode=0 Jan 22 00:18:08 crc kubenswrapper[4829]: I0122 00:18:08.658480 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h" event={"ID":"0bf87245-ac20-41c2-ad0e-4b08d5aea64e","Type":"ContainerDied","Data":"961ee05dc1a69ab3dd4d24a5af923259204578d961e5ad55f78036beb2a3726b"} Jan 22 00:18:09 crc kubenswrapper[4829]: I0122 00:18:09.665108 4829 generic.go:334] "Generic (PLEG): container finished" podID="3b106cde-e55f-49c8-bf6a-c449b5c11b79" containerID="7b0ef1fbf3829b55b8e28b862d7379ec3740e759ac8819c41bde18930c1c6a08" exitCode=0 Jan 22 00:18:09 crc kubenswrapper[4829]: I0122 00:18:09.665191 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq" 
event={"ID":"3b106cde-e55f-49c8-bf6a-c449b5c11b79","Type":"ContainerDied","Data":"7b0ef1fbf3829b55b8e28b862d7379ec3740e759ac8819c41bde18930c1c6a08"} Jan 22 00:18:09 crc kubenswrapper[4829]: I0122 00:18:09.667664 4829 generic.go:334] "Generic (PLEG): container finished" podID="0bf87245-ac20-41c2-ad0e-4b08d5aea64e" containerID="e4a5e842df364ec3a29059f7a7047e6ab10641f0b708e5a7a458d171fadcd1b9" exitCode=0 Jan 22 00:18:09 crc kubenswrapper[4829]: I0122 00:18:09.667693 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h" event={"ID":"0bf87245-ac20-41c2-ad0e-4b08d5aea64e","Type":"ContainerDied","Data":"e4a5e842df364ec3a29059f7a7047e6ab10641f0b708e5a7a458d171fadcd1b9"} Jan 22 00:18:11 crc kubenswrapper[4829]: I0122 00:18:11.081003 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h" Jan 22 00:18:11 crc kubenswrapper[4829]: I0122 00:18:11.138467 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq" Jan 22 00:18:11 crc kubenswrapper[4829]: I0122 00:18:11.162738 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wcll2\" (UniqueName: \"kubernetes.io/projected/3b106cde-e55f-49c8-bf6a-c449b5c11b79-kube-api-access-wcll2\") pod \"3b106cde-e55f-49c8-bf6a-c449b5c11b79\" (UID: \"3b106cde-e55f-49c8-bf6a-c449b5c11b79\") " Jan 22 00:18:11 crc kubenswrapper[4829]: I0122 00:18:11.162790 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0bf87245-ac20-41c2-ad0e-4b08d5aea64e-util\") pod \"0bf87245-ac20-41c2-ad0e-4b08d5aea64e\" (UID: \"0bf87245-ac20-41c2-ad0e-4b08d5aea64e\") " Jan 22 00:18:11 crc kubenswrapper[4829]: I0122 00:18:11.162825 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0bf87245-ac20-41c2-ad0e-4b08d5aea64e-bundle\") pod \"0bf87245-ac20-41c2-ad0e-4b08d5aea64e\" (UID: \"0bf87245-ac20-41c2-ad0e-4b08d5aea64e\") " Jan 22 00:18:11 crc kubenswrapper[4829]: I0122 00:18:11.162845 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3b106cde-e55f-49c8-bf6a-c449b5c11b79-bundle\") pod \"3b106cde-e55f-49c8-bf6a-c449b5c11b79\" (UID: \"3b106cde-e55f-49c8-bf6a-c449b5c11b79\") " Jan 22 00:18:11 crc kubenswrapper[4829]: I0122 00:18:11.162935 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3b106cde-e55f-49c8-bf6a-c449b5c11b79-util\") pod \"3b106cde-e55f-49c8-bf6a-c449b5c11b79\" (UID: \"3b106cde-e55f-49c8-bf6a-c449b5c11b79\") " Jan 22 00:18:11 crc kubenswrapper[4829]: I0122 00:18:11.162954 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-52rcj\" (UniqueName: \"kubernetes.io/projected/0bf87245-ac20-41c2-ad0e-4b08d5aea64e-kube-api-access-52rcj\") pod \"0bf87245-ac20-41c2-ad0e-4b08d5aea64e\" (UID: \"0bf87245-ac20-41c2-ad0e-4b08d5aea64e\") " Jan 22 00:18:11 crc kubenswrapper[4829]: I0122 00:18:11.164627 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b106cde-e55f-49c8-bf6a-c449b5c11b79-bundle" (OuterVolumeSpecName: 
"bundle") pod "3b106cde-e55f-49c8-bf6a-c449b5c11b79" (UID: "3b106cde-e55f-49c8-bf6a-c449b5c11b79"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:18:11 crc kubenswrapper[4829]: I0122 00:18:11.165205 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0bf87245-ac20-41c2-ad0e-4b08d5aea64e-bundle" (OuterVolumeSpecName: "bundle") pod "0bf87245-ac20-41c2-ad0e-4b08d5aea64e" (UID: "0bf87245-ac20-41c2-ad0e-4b08d5aea64e"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:18:11 crc kubenswrapper[4829]: I0122 00:18:11.169777 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b106cde-e55f-49c8-bf6a-c449b5c11b79-kube-api-access-wcll2" (OuterVolumeSpecName: "kube-api-access-wcll2") pod "3b106cde-e55f-49c8-bf6a-c449b5c11b79" (UID: "3b106cde-e55f-49c8-bf6a-c449b5c11b79"). InnerVolumeSpecName "kube-api-access-wcll2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:18:11 crc kubenswrapper[4829]: I0122 00:18:11.181522 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b106cde-e55f-49c8-bf6a-c449b5c11b79-util" (OuterVolumeSpecName: "util") pod "3b106cde-e55f-49c8-bf6a-c449b5c11b79" (UID: "3b106cde-e55f-49c8-bf6a-c449b5c11b79"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:18:11 crc kubenswrapper[4829]: I0122 00:18:11.183511 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bf87245-ac20-41c2-ad0e-4b08d5aea64e-kube-api-access-52rcj" (OuterVolumeSpecName: "kube-api-access-52rcj") pod "0bf87245-ac20-41c2-ad0e-4b08d5aea64e" (UID: "0bf87245-ac20-41c2-ad0e-4b08d5aea64e"). InnerVolumeSpecName "kube-api-access-52rcj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:18:11 crc kubenswrapper[4829]: I0122 00:18:11.194820 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0bf87245-ac20-41c2-ad0e-4b08d5aea64e-util" (OuterVolumeSpecName: "util") pod "0bf87245-ac20-41c2-ad0e-4b08d5aea64e" (UID: "0bf87245-ac20-41c2-ad0e-4b08d5aea64e"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:18:11 crc kubenswrapper[4829]: I0122 00:18:11.264021 4829 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3b106cde-e55f-49c8-bf6a-c449b5c11b79-util\") on node \"crc\" DevicePath \"\"" Jan 22 00:18:11 crc kubenswrapper[4829]: I0122 00:18:11.264062 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-52rcj\" (UniqueName: \"kubernetes.io/projected/0bf87245-ac20-41c2-ad0e-4b08d5aea64e-kube-api-access-52rcj\") on node \"crc\" DevicePath \"\"" Jan 22 00:18:11 crc kubenswrapper[4829]: I0122 00:18:11.264109 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcll2\" (UniqueName: \"kubernetes.io/projected/3b106cde-e55f-49c8-bf6a-c449b5c11b79-kube-api-access-wcll2\") on node \"crc\" DevicePath \"\"" Jan 22 00:18:11 crc kubenswrapper[4829]: I0122 00:18:11.264121 4829 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0bf87245-ac20-41c2-ad0e-4b08d5aea64e-util\") on node \"crc\" DevicePath \"\"" Jan 22 00:18:11 crc kubenswrapper[4829]: I0122 00:18:11.264132 4829 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0bf87245-ac20-41c2-ad0e-4b08d5aea64e-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 00:18:11 crc kubenswrapper[4829]: I0122 00:18:11.264143 4829 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3b106cde-e55f-49c8-bf6a-c449b5c11b79-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 00:18:11 crc kubenswrapper[4829]: I0122 00:18:11.680029 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq" Jan 22 00:18:11 crc kubenswrapper[4829]: I0122 00:18:11.680021 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq" event={"ID":"3b106cde-e55f-49c8-bf6a-c449b5c11b79","Type":"ContainerDied","Data":"3b7596c9c13d4dfc15483d2c351ebfc2cdef30057c7fd2fa1c633625826d6bf8"} Jan 22 00:18:11 crc kubenswrapper[4829]: I0122 00:18:11.680143 4829 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3b7596c9c13d4dfc15483d2c351ebfc2cdef30057c7fd2fa1c633625826d6bf8" Jan 22 00:18:11 crc kubenswrapper[4829]: I0122 00:18:11.681737 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h" event={"ID":"0bf87245-ac20-41c2-ad0e-4b08d5aea64e","Type":"ContainerDied","Data":"22eaf59ec17428cf1aa0e96708a216003919997a321d6b7af4ad9adbb2b75a35"} Jan 22 00:18:11 crc kubenswrapper[4829]: I0122 00:18:11.681759 4829 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22eaf59ec17428cf1aa0e96708a216003919997a321d6b7af4ad9adbb2b75a35" Jan 22 00:18:11 crc kubenswrapper[4829]: I0122 00:18:11.681829 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h" Jan 22 00:18:14 crc kubenswrapper[4829]: I0122 00:18:14.391637 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h"] Jan 22 00:18:14 crc kubenswrapper[4829]: E0122 00:18:14.392041 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b106cde-e55f-49c8-bf6a-c449b5c11b79" containerName="extract" Jan 22 00:18:14 crc kubenswrapper[4829]: I0122 00:18:14.392052 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b106cde-e55f-49c8-bf6a-c449b5c11b79" containerName="extract" Jan 22 00:18:14 crc kubenswrapper[4829]: E0122 00:18:14.392061 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b106cde-e55f-49c8-bf6a-c449b5c11b79" containerName="util" Jan 22 00:18:14 crc kubenswrapper[4829]: I0122 00:18:14.392067 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b106cde-e55f-49c8-bf6a-c449b5c11b79" containerName="util" Jan 22 00:18:14 crc kubenswrapper[4829]: E0122 00:18:14.392076 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bf87245-ac20-41c2-ad0e-4b08d5aea64e" containerName="util" Jan 22 00:18:14 crc kubenswrapper[4829]: I0122 00:18:14.392082 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bf87245-ac20-41c2-ad0e-4b08d5aea64e" containerName="util" Jan 22 00:18:14 crc kubenswrapper[4829]: E0122 00:18:14.392093 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bf87245-ac20-41c2-ad0e-4b08d5aea64e" containerName="extract" Jan 22 00:18:14 crc kubenswrapper[4829]: I0122 00:18:14.392098 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bf87245-ac20-41c2-ad0e-4b08d5aea64e" containerName="extract" Jan 22 00:18:14 crc kubenswrapper[4829]: E0122 00:18:14.392105 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bf87245-ac20-41c2-ad0e-4b08d5aea64e" containerName="pull" Jan 22 00:18:14 crc kubenswrapper[4829]: I0122 00:18:14.392111 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bf87245-ac20-41c2-ad0e-4b08d5aea64e" containerName="pull" Jan 22 00:18:14 crc kubenswrapper[4829]: E0122 00:18:14.392122 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b106cde-e55f-49c8-bf6a-c449b5c11b79" containerName="pull" Jan 22 00:18:14 crc kubenswrapper[4829]: I0122 00:18:14.392128 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b106cde-e55f-49c8-bf6a-c449b5c11b79" containerName="pull" Jan 22 00:18:14 crc kubenswrapper[4829]: I0122 00:18:14.392213 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b106cde-e55f-49c8-bf6a-c449b5c11b79" containerName="extract" Jan 22 00:18:14 crc kubenswrapper[4829]: I0122 00:18:14.392223 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="0bf87245-ac20-41c2-ad0e-4b08d5aea64e" containerName="extract" Jan 22 00:18:14 crc kubenswrapper[4829]: I0122 00:18:14.392876 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h" Jan 22 00:18:14 crc kubenswrapper[4829]: I0122 00:18:14.395207 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 22 00:18:14 crc kubenswrapper[4829]: I0122 00:18:14.398164 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfv5w\" (UniqueName: \"kubernetes.io/projected/0225cd53-73fa-4345-85b3-7de7de23f707-kube-api-access-cfv5w\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h\" (UID: \"0225cd53-73fa-4345-85b3-7de7de23f707\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h" Jan 22 00:18:14 crc kubenswrapper[4829]: I0122 00:18:14.398212 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0225cd53-73fa-4345-85b3-7de7de23f707-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h\" (UID: \"0225cd53-73fa-4345-85b3-7de7de23f707\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h" Jan 22 00:18:14 crc kubenswrapper[4829]: I0122 00:18:14.398249 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0225cd53-73fa-4345-85b3-7de7de23f707-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h\" (UID: \"0225cd53-73fa-4345-85b3-7de7de23f707\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h" Jan 22 00:18:14 crc kubenswrapper[4829]: I0122 00:18:14.409976 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h"] Jan 22 00:18:14 crc kubenswrapper[4829]: I0122 00:18:14.499363 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfv5w\" (UniqueName: \"kubernetes.io/projected/0225cd53-73fa-4345-85b3-7de7de23f707-kube-api-access-cfv5w\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h\" (UID: \"0225cd53-73fa-4345-85b3-7de7de23f707\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h" Jan 22 00:18:14 crc kubenswrapper[4829]: I0122 00:18:14.499426 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0225cd53-73fa-4345-85b3-7de7de23f707-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h\" (UID: \"0225cd53-73fa-4345-85b3-7de7de23f707\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h" Jan 22 00:18:14 crc kubenswrapper[4829]: I0122 00:18:14.499460 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0225cd53-73fa-4345-85b3-7de7de23f707-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h\" (UID: \"0225cd53-73fa-4345-85b3-7de7de23f707\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h" Jan 22 00:18:14 crc kubenswrapper[4829]: I0122 00:18:14.499989 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/0225cd53-73fa-4345-85b3-7de7de23f707-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h\" (UID: \"0225cd53-73fa-4345-85b3-7de7de23f707\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h" Jan 22 00:18:14 crc kubenswrapper[4829]: I0122 00:18:14.502448 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0225cd53-73fa-4345-85b3-7de7de23f707-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h\" (UID: \"0225cd53-73fa-4345-85b3-7de7de23f707\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h" Jan 22 00:18:14 crc kubenswrapper[4829]: I0122 00:18:14.526459 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfv5w\" (UniqueName: \"kubernetes.io/projected/0225cd53-73fa-4345-85b3-7de7de23f707-kube-api-access-cfv5w\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h\" (UID: \"0225cd53-73fa-4345-85b3-7de7de23f707\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h" Jan 22 00:18:14 crc kubenswrapper[4829]: I0122 00:18:14.713662 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h" Jan 22 00:18:15 crc kubenswrapper[4829]: I0122 00:18:15.129366 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h"] Jan 22 00:18:15 crc kubenswrapper[4829]: I0122 00:18:15.701944 4829 generic.go:334] "Generic (PLEG): container finished" podID="0225cd53-73fa-4345-85b3-7de7de23f707" containerID="bcedcbab4105d0f190f853ab2ebf8879a152f64c8cea716f456a896d71678de4" exitCode=0 Jan 22 00:18:15 crc kubenswrapper[4829]: I0122 00:18:15.701980 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h" event={"ID":"0225cd53-73fa-4345-85b3-7de7de23f707","Type":"ContainerDied","Data":"bcedcbab4105d0f190f853ab2ebf8879a152f64c8cea716f456a896d71678de4"} Jan 22 00:18:15 crc kubenswrapper[4829]: I0122 00:18:15.702003 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h" event={"ID":"0225cd53-73fa-4345-85b3-7de7de23f707","Type":"ContainerStarted","Data":"3804ff49b1e73961f2c771ee87fe13c46b5176bd3a154144e471d8cc05540a36"} Jan 22 00:18:15 crc kubenswrapper[4829]: I0122 00:18:15.783310 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-dq7x7"] Jan 22 00:18:15 crc kubenswrapper[4829]: I0122 00:18:15.784407 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-dq7x7" Jan 22 00:18:15 crc kubenswrapper[4829]: I0122 00:18:15.787079 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-qgxc7" Jan 22 00:18:15 crc kubenswrapper[4829]: I0122 00:18:15.787665 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Jan 22 00:18:15 crc kubenswrapper[4829]: I0122 00:18:15.791343 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Jan 22 00:18:15 crc kubenswrapper[4829]: I0122 00:18:15.801711 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-dq7x7"] Jan 22 00:18:15 crc kubenswrapper[4829]: I0122 00:18:15.912196 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-nmvft"] Jan 22 00:18:15 crc kubenswrapper[4829]: I0122 00:18:15.912842 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-nmvft" Jan 22 00:18:15 crc kubenswrapper[4829]: I0122 00:18:15.915995 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-76w2s" Jan 22 00:18:15 crc kubenswrapper[4829]: I0122 00:18:15.916090 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Jan 22 00:18:15 crc kubenswrapper[4829]: I0122 00:18:15.919895 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xt25\" (UniqueName: \"kubernetes.io/projected/a6e6ea79-b55a-4652-bc1f-4788eb17b6d4-kube-api-access-2xt25\") pod \"obo-prometheus-operator-68bc856cb9-dq7x7\" (UID: \"a6e6ea79-b55a-4652-bc1f-4788eb17b6d4\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-dq7x7" Jan 22 00:18:15 crc kubenswrapper[4829]: I0122 00:18:15.921136 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-6xt2f"] Jan 22 00:18:15 crc kubenswrapper[4829]: I0122 00:18:15.921762 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-6xt2f" Jan 22 00:18:15 crc kubenswrapper[4829]: I0122 00:18:15.930729 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-nmvft"] Jan 22 00:18:15 crc kubenswrapper[4829]: I0122 00:18:15.940999 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-6xt2f"] Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.021218 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xt25\" (UniqueName: \"kubernetes.io/projected/a6e6ea79-b55a-4652-bc1f-4788eb17b6d4-kube-api-access-2xt25\") pod \"obo-prometheus-operator-68bc856cb9-dq7x7\" (UID: \"a6e6ea79-b55a-4652-bc1f-4788eb17b6d4\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-dq7x7" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.021313 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3cc8f122-dbed-494d-873d-dde35bf15c60-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-666d948cd9-nmvft\" (UID: \"3cc8f122-dbed-494d-873d-dde35bf15c60\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-nmvft" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.021352 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3cc8f122-dbed-494d-873d-dde35bf15c60-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-666d948cd9-nmvft\" (UID: \"3cc8f122-dbed-494d-873d-dde35bf15c60\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-nmvft" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.031410 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-klpvs"] Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.032232 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-klpvs" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.038898 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-vtd7t" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.039162 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.054629 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xt25\" (UniqueName: \"kubernetes.io/projected/a6e6ea79-b55a-4652-bc1f-4788eb17b6d4-kube-api-access-2xt25\") pod \"obo-prometheus-operator-68bc856cb9-dq7x7\" (UID: \"a6e6ea79-b55a-4652-bc1f-4788eb17b6d4\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-dq7x7" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.076019 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-klpvs"] Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.101036 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-dq7x7" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.122645 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3cc8f122-dbed-494d-873d-dde35bf15c60-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-666d948cd9-nmvft\" (UID: \"3cc8f122-dbed-494d-873d-dde35bf15c60\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-nmvft" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.122715 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3cc8f122-dbed-494d-873d-dde35bf15c60-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-666d948cd9-nmvft\" (UID: \"3cc8f122-dbed-494d-873d-dde35bf15c60\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-nmvft" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.122756 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/893c6bae-0ad7-4fb7-8f5a-5c5a13d41a3d-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-666d948cd9-6xt2f\" (UID: \"893c6bae-0ad7-4fb7-8f5a-5c5a13d41a3d\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-6xt2f" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.122795 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/893c6bae-0ad7-4fb7-8f5a-5c5a13d41a3d-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-666d948cd9-6xt2f\" (UID: \"893c6bae-0ad7-4fb7-8f5a-5c5a13d41a3d\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-6xt2f" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.130350 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3cc8f122-dbed-494d-873d-dde35bf15c60-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-666d948cd9-nmvft\" (UID: \"3cc8f122-dbed-494d-873d-dde35bf15c60\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-nmvft" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.132103 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3cc8f122-dbed-494d-873d-dde35bf15c60-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-666d948cd9-nmvft\" (UID: \"3cc8f122-dbed-494d-873d-dde35bf15c60\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-nmvft" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.224127 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbdw4\" (UniqueName: \"kubernetes.io/projected/bdb33684-1dcb-41b0-904e-7bcb3aa2d1de-kube-api-access-gbdw4\") pod \"observability-operator-59bdc8b94-klpvs\" (UID: \"bdb33684-1dcb-41b0-904e-7bcb3aa2d1de\") " pod="openshift-operators/observability-operator-59bdc8b94-klpvs" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.224320 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/893c6bae-0ad7-4fb7-8f5a-5c5a13d41a3d-apiservice-cert\") 
pod \"obo-prometheus-operator-admission-webhook-666d948cd9-6xt2f\" (UID: \"893c6bae-0ad7-4fb7-8f5a-5c5a13d41a3d\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-6xt2f" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.224358 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/893c6bae-0ad7-4fb7-8f5a-5c5a13d41a3d-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-666d948cd9-6xt2f\" (UID: \"893c6bae-0ad7-4fb7-8f5a-5c5a13d41a3d\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-6xt2f" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.224387 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/bdb33684-1dcb-41b0-904e-7bcb3aa2d1de-observability-operator-tls\") pod \"observability-operator-59bdc8b94-klpvs\" (UID: \"bdb33684-1dcb-41b0-904e-7bcb3aa2d1de\") " pod="openshift-operators/observability-operator-59bdc8b94-klpvs" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.232203 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/893c6bae-0ad7-4fb7-8f5a-5c5a13d41a3d-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-666d948cd9-6xt2f\" (UID: \"893c6bae-0ad7-4fb7-8f5a-5c5a13d41a3d\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-6xt2f" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.232507 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/893c6bae-0ad7-4fb7-8f5a-5c5a13d41a3d-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-666d948cd9-6xt2f\" (UID: \"893c6bae-0ad7-4fb7-8f5a-5c5a13d41a3d\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-6xt2f" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.234773 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-nmvft" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.244801 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-6xt2f" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.324970 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbdw4\" (UniqueName: \"kubernetes.io/projected/bdb33684-1dcb-41b0-904e-7bcb3aa2d1de-kube-api-access-gbdw4\") pod \"observability-operator-59bdc8b94-klpvs\" (UID: \"bdb33684-1dcb-41b0-904e-7bcb3aa2d1de\") " pod="openshift-operators/observability-operator-59bdc8b94-klpvs" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.325045 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/bdb33684-1dcb-41b0-904e-7bcb3aa2d1de-observability-operator-tls\") pod \"observability-operator-59bdc8b94-klpvs\" (UID: \"bdb33684-1dcb-41b0-904e-7bcb3aa2d1de\") " pod="openshift-operators/observability-operator-59bdc8b94-klpvs" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.328707 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/bdb33684-1dcb-41b0-904e-7bcb3aa2d1de-observability-operator-tls\") pod \"observability-operator-59bdc8b94-klpvs\" (UID: \"bdb33684-1dcb-41b0-904e-7bcb3aa2d1de\") " pod="openshift-operators/observability-operator-59bdc8b94-klpvs" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.342562 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-992z9"] Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.343571 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-992z9" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.364089 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-m6g64" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.371386 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-992z9"] Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.376291 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbdw4\" (UniqueName: \"kubernetes.io/projected/bdb33684-1dcb-41b0-904e-7bcb3aa2d1de-kube-api-access-gbdw4\") pod \"observability-operator-59bdc8b94-klpvs\" (UID: \"bdb33684-1dcb-41b0-904e-7bcb3aa2d1de\") " pod="openshift-operators/observability-operator-59bdc8b94-klpvs" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.530401 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/f8606d02-0830-4b52-80b0-5e30e6003f08-openshift-service-ca\") pod \"perses-operator-5bf474d74f-992z9\" (UID: \"f8606d02-0830-4b52-80b0-5e30e6003f08\") " pod="openshift-operators/perses-operator-5bf474d74f-992z9" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.530530 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dx498\" (UniqueName: \"kubernetes.io/projected/f8606d02-0830-4b52-80b0-5e30e6003f08-kube-api-access-dx498\") pod \"perses-operator-5bf474d74f-992z9\" (UID: \"f8606d02-0830-4b52-80b0-5e30e6003f08\") " pod="openshift-operators/perses-operator-5bf474d74f-992z9" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.618819 4829 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-dq7x7"] Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.632323 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dx498\" (UniqueName: \"kubernetes.io/projected/f8606d02-0830-4b52-80b0-5e30e6003f08-kube-api-access-dx498\") pod \"perses-operator-5bf474d74f-992z9\" (UID: \"f8606d02-0830-4b52-80b0-5e30e6003f08\") " pod="openshift-operators/perses-operator-5bf474d74f-992z9" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.632392 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/f8606d02-0830-4b52-80b0-5e30e6003f08-openshift-service-ca\") pod \"perses-operator-5bf474d74f-992z9\" (UID: \"f8606d02-0830-4b52-80b0-5e30e6003f08\") " pod="openshift-operators/perses-operator-5bf474d74f-992z9" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.636727 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/f8606d02-0830-4b52-80b0-5e30e6003f08-openshift-service-ca\") pod \"perses-operator-5bf474d74f-992z9\" (UID: \"f8606d02-0830-4b52-80b0-5e30e6003f08\") " pod="openshift-operators/perses-operator-5bf474d74f-992z9" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.648688 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-klpvs" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.654317 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dx498\" (UniqueName: \"kubernetes.io/projected/f8606d02-0830-4b52-80b0-5e30e6003f08-kube-api-access-dx498\") pod \"perses-operator-5bf474d74f-992z9\" (UID: \"f8606d02-0830-4b52-80b0-5e30e6003f08\") " pod="openshift-operators/perses-operator-5bf474d74f-992z9" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.674833 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-6xt2f"] Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.710093 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-992z9" Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.719336 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-dq7x7" event={"ID":"a6e6ea79-b55a-4652-bc1f-4788eb17b6d4","Type":"ContainerStarted","Data":"218d1a530ede7aa51ccafa32aa6838d77b45e4f7d3edb6dfbe54d1f7d1ff0fb2"} Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.720190 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-nmvft"] Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.726721 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-6xt2f" event={"ID":"893c6bae-0ad7-4fb7-8f5a-5c5a13d41a3d","Type":"ContainerStarted","Data":"25b43bd6aa4a2a2ef36c267d094655c3ed1f95ea0edc7f77929fe5dc4ba8943b"} Jan 22 00:18:16 crc kubenswrapper[4829]: W0122 00:18:16.733089 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3cc8f122_dbed_494d_873d_dde35bf15c60.slice/crio-a68bc1dadc2a72ba3f75f81fc7862dbdbaf5fc926fb5d7d0f0f02cc70eaefbba WatchSource:0}: Error finding container a68bc1dadc2a72ba3f75f81fc7862dbdbaf5fc926fb5d7d0f0f02cc70eaefbba: Status 404 returned error can't find the container with id a68bc1dadc2a72ba3f75f81fc7862dbdbaf5fc926fb5d7d0f0f02cc70eaefbba Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.873287 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-klpvs"] Jan 22 00:18:16 crc kubenswrapper[4829]: I0122 00:18:16.926960 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-992z9"] Jan 22 00:18:16 crc kubenswrapper[4829]: W0122 00:18:16.952645 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf8606d02_0830_4b52_80b0_5e30e6003f08.slice/crio-ecf1e139ad43874dc5ce16e4d6e032db0b18900798960e7ffe5dd8e930a96801 WatchSource:0}: Error finding container ecf1e139ad43874dc5ce16e4d6e032db0b18900798960e7ffe5dd8e930a96801: Status 404 returned error can't find the container with id ecf1e139ad43874dc5ce16e4d6e032db0b18900798960e7ffe5dd8e930a96801 Jan 22 00:18:17 crc kubenswrapper[4829]: I0122 00:18:17.735777 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-klpvs" event={"ID":"bdb33684-1dcb-41b0-904e-7bcb3aa2d1de","Type":"ContainerStarted","Data":"b89e710fe0e5b55e7ae8229e13b795bba5cf6e706e34f527759d994a40afa8fd"} Jan 22 00:18:17 crc kubenswrapper[4829]: I0122 00:18:17.736790 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-992z9" event={"ID":"f8606d02-0830-4b52-80b0-5e30e6003f08","Type":"ContainerStarted","Data":"ecf1e139ad43874dc5ce16e4d6e032db0b18900798960e7ffe5dd8e930a96801"} Jan 22 00:18:17 crc kubenswrapper[4829]: I0122 00:18:17.737900 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-nmvft" event={"ID":"3cc8f122-dbed-494d-873d-dde35bf15c60","Type":"ContainerStarted","Data":"a68bc1dadc2a72ba3f75f81fc7862dbdbaf5fc926fb5d7d0f0f02cc70eaefbba"} Jan 22 00:18:19 crc kubenswrapper[4829]: I0122 00:18:19.553017 4829 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["service-telemetry/elastic-operator-754d769d7d-ngl5t"] Jan 22 00:18:19 crc kubenswrapper[4829]: I0122 00:18:19.554928 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/elastic-operator-754d769d7d-ngl5t" Jan 22 00:18:19 crc kubenswrapper[4829]: I0122 00:18:19.558963 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"openshift-service-ca.crt" Jan 22 00:18:19 crc kubenswrapper[4829]: I0122 00:18:19.559082 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"kube-root-ca.crt" Jan 22 00:18:19 crc kubenswrapper[4829]: I0122 00:18:19.559210 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elastic-operator-dockercfg-blh6m" Jan 22 00:18:19 crc kubenswrapper[4829]: I0122 00:18:19.560109 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elastic-operator-service-cert" Jan 22 00:18:19 crc kubenswrapper[4829]: I0122 00:18:19.568793 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkx4z\" (UniqueName: \"kubernetes.io/projected/6e0a5463-b4ff-4f3c-96e8-a041af2f985c-kube-api-access-jkx4z\") pod \"elastic-operator-754d769d7d-ngl5t\" (UID: \"6e0a5463-b4ff-4f3c-96e8-a041af2f985c\") " pod="service-telemetry/elastic-operator-754d769d7d-ngl5t" Jan 22 00:18:19 crc kubenswrapper[4829]: I0122 00:18:19.568842 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6e0a5463-b4ff-4f3c-96e8-a041af2f985c-webhook-cert\") pod \"elastic-operator-754d769d7d-ngl5t\" (UID: \"6e0a5463-b4ff-4f3c-96e8-a041af2f985c\") " pod="service-telemetry/elastic-operator-754d769d7d-ngl5t" Jan 22 00:18:19 crc kubenswrapper[4829]: I0122 00:18:19.568892 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6e0a5463-b4ff-4f3c-96e8-a041af2f985c-apiservice-cert\") pod \"elastic-operator-754d769d7d-ngl5t\" (UID: \"6e0a5463-b4ff-4f3c-96e8-a041af2f985c\") " pod="service-telemetry/elastic-operator-754d769d7d-ngl5t" Jan 22 00:18:19 crc kubenswrapper[4829]: I0122 00:18:19.569327 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elastic-operator-754d769d7d-ngl5t"] Jan 22 00:18:19 crc kubenswrapper[4829]: I0122 00:18:19.669984 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6e0a5463-b4ff-4f3c-96e8-a041af2f985c-apiservice-cert\") pod \"elastic-operator-754d769d7d-ngl5t\" (UID: \"6e0a5463-b4ff-4f3c-96e8-a041af2f985c\") " pod="service-telemetry/elastic-operator-754d769d7d-ngl5t" Jan 22 00:18:19 crc kubenswrapper[4829]: I0122 00:18:19.670115 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkx4z\" (UniqueName: \"kubernetes.io/projected/6e0a5463-b4ff-4f3c-96e8-a041af2f985c-kube-api-access-jkx4z\") pod \"elastic-operator-754d769d7d-ngl5t\" (UID: \"6e0a5463-b4ff-4f3c-96e8-a041af2f985c\") " pod="service-telemetry/elastic-operator-754d769d7d-ngl5t" Jan 22 00:18:19 crc kubenswrapper[4829]: I0122 00:18:19.670151 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6e0a5463-b4ff-4f3c-96e8-a041af2f985c-webhook-cert\") pod 
\"elastic-operator-754d769d7d-ngl5t\" (UID: \"6e0a5463-b4ff-4f3c-96e8-a041af2f985c\") " pod="service-telemetry/elastic-operator-754d769d7d-ngl5t" Jan 22 00:18:19 crc kubenswrapper[4829]: I0122 00:18:19.680173 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6e0a5463-b4ff-4f3c-96e8-a041af2f985c-webhook-cert\") pod \"elastic-operator-754d769d7d-ngl5t\" (UID: \"6e0a5463-b4ff-4f3c-96e8-a041af2f985c\") " pod="service-telemetry/elastic-operator-754d769d7d-ngl5t" Jan 22 00:18:19 crc kubenswrapper[4829]: I0122 00:18:19.684177 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6e0a5463-b4ff-4f3c-96e8-a041af2f985c-apiservice-cert\") pod \"elastic-operator-754d769d7d-ngl5t\" (UID: \"6e0a5463-b4ff-4f3c-96e8-a041af2f985c\") " pod="service-telemetry/elastic-operator-754d769d7d-ngl5t" Jan 22 00:18:19 crc kubenswrapper[4829]: I0122 00:18:19.696318 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkx4z\" (UniqueName: \"kubernetes.io/projected/6e0a5463-b4ff-4f3c-96e8-a041af2f985c-kube-api-access-jkx4z\") pod \"elastic-operator-754d769d7d-ngl5t\" (UID: \"6e0a5463-b4ff-4f3c-96e8-a041af2f985c\") " pod="service-telemetry/elastic-operator-754d769d7d-ngl5t" Jan 22 00:18:19 crc kubenswrapper[4829]: I0122 00:18:19.882616 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/elastic-operator-754d769d7d-ngl5t" Jan 22 00:18:24 crc kubenswrapper[4829]: I0122 00:18:24.469253 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-28w49"] Jan 22 00:18:24 crc kubenswrapper[4829]: I0122 00:18:24.470714 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/interconnect-operator-5bb49f789d-28w49" Jan 22 00:18:24 crc kubenswrapper[4829]: I0122 00:18:24.487070 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"interconnect-operator-dockercfg-vktht" Jan 22 00:18:24 crc kubenswrapper[4829]: I0122 00:18:24.490110 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-28w49"] Jan 22 00:18:24 crc kubenswrapper[4829]: I0122 00:18:24.514759 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8986l\" (UniqueName: \"kubernetes.io/projected/e7f04bc7-8fec-45a1-8d01-cbff2015ceda-kube-api-access-8986l\") pod \"interconnect-operator-5bb49f789d-28w49\" (UID: \"e7f04bc7-8fec-45a1-8d01-cbff2015ceda\") " pod="service-telemetry/interconnect-operator-5bb49f789d-28w49" Jan 22 00:18:24 crc kubenswrapper[4829]: I0122 00:18:24.615847 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8986l\" (UniqueName: \"kubernetes.io/projected/e7f04bc7-8fec-45a1-8d01-cbff2015ceda-kube-api-access-8986l\") pod \"interconnect-operator-5bb49f789d-28w49\" (UID: \"e7f04bc7-8fec-45a1-8d01-cbff2015ceda\") " pod="service-telemetry/interconnect-operator-5bb49f789d-28w49" Jan 22 00:18:24 crc kubenswrapper[4829]: I0122 00:18:24.635562 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8986l\" (UniqueName: \"kubernetes.io/projected/e7f04bc7-8fec-45a1-8d01-cbff2015ceda-kube-api-access-8986l\") pod \"interconnect-operator-5bb49f789d-28w49\" (UID: \"e7f04bc7-8fec-45a1-8d01-cbff2015ceda\") " pod="service-telemetry/interconnect-operator-5bb49f789d-28w49" Jan 22 00:18:24 crc kubenswrapper[4829]: I0122 00:18:24.786249 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/interconnect-operator-5bb49f789d-28w49" Jan 22 00:18:31 crc kubenswrapper[4829]: E0122 00:18:31.314824 4829 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:b5c8526d2ae660fe092dd8a7acf18ec4957d5c265890a222f55396fc2cdaeed8" Jan 22 00:18:31 crc kubenswrapper[4829]: E0122 00:18:31.315730 4829 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:perses-operator,Image:registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:b5c8526d2ae660fe092dd8a7acf18ec4957d5c265890a222f55396fc2cdaeed8,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.1,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{134217728 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openshift-service-ca,ReadOnly:true,MountPath:/ca,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dx498,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod perses-operator-5bf474d74f-992z9_openshift-operators(f8606d02-0830-4b52-80b0-5e30e6003f08): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 00:18:31 crc kubenswrapper[4829]: E0122 00:18:31.320759 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"perses-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/perses-operator-5bf474d74f-992z9" podUID="f8606d02-0830-4b52-80b0-5e30e6003f08" Jan 22 00:18:31 crc kubenswrapper[4829]: E0122 00:18:31.377732 4829 log.go:32] "PullImage from image service failed" err="rpc error: code 
= Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea" Jan 22 00:18:31 crc kubenswrapper[4829]: E0122 00:18:31.377950 4829 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator-admission-webhook,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea,Command:[],Args:[--web.enable-tls=true --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt --web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.1,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{209715200 0} {} BinarySI},},Requests:ResourceList{cpu: {{50 -3} {} 50m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-admission-webhook-666d948cd9-nmvft_openshift-operators(3cc8f122-dbed-494d-873d-dde35bf15c60): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 00:18:31 crc kubenswrapper[4829]: E0122 00:18:31.379062 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-nmvft" podUID="3cc8f122-dbed-494d-873d-dde35bf15c60" Jan 22 00:18:31 crc kubenswrapper[4829]: I0122 00:18:31.763502 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elastic-operator-754d769d7d-ngl5t"] Jan 22 00:18:31 crc kubenswrapper[4829]: I0122 00:18:31.778193 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-28w49"] Jan 22 00:18:31 crc kubenswrapper[4829]: W0122 00:18:31.785212 4829 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode7f04bc7_8fec_45a1_8d01_cbff2015ceda.slice/crio-17686dd0bcaccdde11e34ddcaafb1baef5a0b3eeea005b1736ba9931c6950a66 WatchSource:0}: Error finding container 17686dd0bcaccdde11e34ddcaafb1baef5a0b3eeea005b1736ba9931c6950a66: Status 404 returned error can't find the container with id 17686dd0bcaccdde11e34ddcaafb1baef5a0b3eeea005b1736ba9931c6950a66 Jan 22 00:18:31 crc kubenswrapper[4829]: I0122 00:18:31.871650 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-dq7x7" event={"ID":"a6e6ea79-b55a-4652-bc1f-4788eb17b6d4","Type":"ContainerStarted","Data":"b2ffc0afaa5e941b2ae664987cae838bb833a81a0ad10cfb386ff18be67e3fe8"} Jan 22 00:18:31 crc kubenswrapper[4829]: I0122 00:18:31.872858 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-6xt2f" event={"ID":"893c6bae-0ad7-4fb7-8f5a-5c5a13d41a3d","Type":"ContainerStarted","Data":"65177dd1627bddd169176e8dcdf33a14c2f47e0fc60c92aa84113af5d5932839"} Jan 22 00:18:31 crc kubenswrapper[4829]: I0122 00:18:31.875439 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elastic-operator-754d769d7d-ngl5t" event={"ID":"6e0a5463-b4ff-4f3c-96e8-a041af2f985c","Type":"ContainerStarted","Data":"b7ebe79deb0e993ef149823ea258eaed9ef21731c36932ae95828796f2571b04"} Jan 22 00:18:31 crc kubenswrapper[4829]: I0122 00:18:31.876847 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/interconnect-operator-5bb49f789d-28w49" event={"ID":"e7f04bc7-8fec-45a1-8d01-cbff2015ceda","Type":"ContainerStarted","Data":"17686dd0bcaccdde11e34ddcaafb1baef5a0b3eeea005b1736ba9931c6950a66"} Jan 22 00:18:31 crc kubenswrapper[4829]: I0122 00:18:31.878143 4829 generic.go:334] "Generic (PLEG): container finished" podID="0225cd53-73fa-4345-85b3-7de7de23f707" containerID="68c38f0e0bccfb111e4ccb65ee5bce45d1c649b573d66806627b4fa5c6c75989" exitCode=0 Jan 22 00:18:31 crc kubenswrapper[4829]: I0122 00:18:31.878190 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h" event={"ID":"0225cd53-73fa-4345-85b3-7de7de23f707","Type":"ContainerDied","Data":"68c38f0e0bccfb111e4ccb65ee5bce45d1c649b573d66806627b4fa5c6c75989"} Jan 22 00:18:31 crc kubenswrapper[4829]: I0122 00:18:31.884704 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-klpvs" event={"ID":"bdb33684-1dcb-41b0-904e-7bcb3aa2d1de","Type":"ContainerStarted","Data":"614b9fe685933832f4e8413491dfd4ce7a5c6fa4bb9220afc9089e3336f85b32"} Jan 22 00:18:31 crc kubenswrapper[4829]: I0122 00:18:31.885226 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-59bdc8b94-klpvs" Jan 22 00:18:31 crc kubenswrapper[4829]: I0122 00:18:31.887184 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-59bdc8b94-klpvs" Jan 22 00:18:31 crc kubenswrapper[4829]: I0122 00:18:31.895026 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-dq7x7" podStartSLOduration=2.164582346 podStartE2EDuration="16.895009286s" podCreationTimestamp="2026-01-22 00:18:15 +0000 UTC" firstStartedPulling="2026-01-22 00:18:16.624246557 +0000 UTC m=+674.660488469" lastFinishedPulling="2026-01-22 
00:18:31.354673497 +0000 UTC m=+689.390915409" observedRunningTime="2026-01-22 00:18:31.893565641 +0000 UTC m=+689.929807563" watchObservedRunningTime="2026-01-22 00:18:31.895009286 +0000 UTC m=+689.931251188" Jan 22 00:18:31 crc kubenswrapper[4829]: E0122 00:18:31.897926 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"perses-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/perses-rhel9-operator@sha256:b5c8526d2ae660fe092dd8a7acf18ec4957d5c265890a222f55396fc2cdaeed8\\\"\"" pod="openshift-operators/perses-operator-5bf474d74f-992z9" podUID="f8606d02-0830-4b52-80b0-5e30e6003f08" Jan 22 00:18:31 crc kubenswrapper[4829]: I0122 00:18:31.926557 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-6xt2f" podStartSLOduration=2.213751581 podStartE2EDuration="16.92652791s" podCreationTimestamp="2026-01-22 00:18:15 +0000 UTC" firstStartedPulling="2026-01-22 00:18:16.689871685 +0000 UTC m=+674.726113597" lastFinishedPulling="2026-01-22 00:18:31.402648014 +0000 UTC m=+689.438889926" observedRunningTime="2026-01-22 00:18:31.925310841 +0000 UTC m=+689.961552753" watchObservedRunningTime="2026-01-22 00:18:31.92652791 +0000 UTC m=+689.962769822" Jan 22 00:18:32 crc kubenswrapper[4829]: I0122 00:18:32.047499 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-59bdc8b94-klpvs" podStartSLOduration=2.538485349 podStartE2EDuration="17.04748153s" podCreationTimestamp="2026-01-22 00:18:15 +0000 UTC" firstStartedPulling="2026-01-22 00:18:16.892833166 +0000 UTC m=+674.929075078" lastFinishedPulling="2026-01-22 00:18:31.401829357 +0000 UTC m=+689.438071259" observedRunningTime="2026-01-22 00:18:32.045331051 +0000 UTC m=+690.081572963" watchObservedRunningTime="2026-01-22 00:18:32.04748153 +0000 UTC m=+690.083723442" Jan 22 00:18:32 crc kubenswrapper[4829]: I0122 00:18:32.896962 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-nmvft" event={"ID":"3cc8f122-dbed-494d-873d-dde35bf15c60","Type":"ContainerStarted","Data":"8f1e414093b9697fac6af057533e541923a0248685d5eb4cf04e079edf7de1fe"} Jan 22 00:18:32 crc kubenswrapper[4829]: I0122 00:18:32.904681 4829 generic.go:334] "Generic (PLEG): container finished" podID="0225cd53-73fa-4345-85b3-7de7de23f707" containerID="162e5fc0e53fdfadc8805c4827477754976869f4889170b28360ed796083f44d" exitCode=0 Jan 22 00:18:32 crc kubenswrapper[4829]: I0122 00:18:32.904815 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h" event={"ID":"0225cd53-73fa-4345-85b3-7de7de23f707","Type":"ContainerDied","Data":"162e5fc0e53fdfadc8805c4827477754976869f4889170b28360ed796083f44d"} Jan 22 00:18:32 crc kubenswrapper[4829]: I0122 00:18:32.921070 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-666d948cd9-nmvft" podStartSLOduration=-9223372018.933725 podStartE2EDuration="17.921051187s" podCreationTimestamp="2026-01-22 00:18:15 +0000 UTC" firstStartedPulling="2026-01-22 00:18:16.737691728 +0000 UTC m=+674.773933640" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:18:32.918074462 +0000 UTC m=+690.954316374" 
watchObservedRunningTime="2026-01-22 00:18:32.921051187 +0000 UTC m=+690.957293099" Jan 22 00:18:34 crc kubenswrapper[4829]: I0122 00:18:34.951198 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h" event={"ID":"0225cd53-73fa-4345-85b3-7de7de23f707","Type":"ContainerDied","Data":"3804ff49b1e73961f2c771ee87fe13c46b5176bd3a154144e471d8cc05540a36"} Jan 22 00:18:34 crc kubenswrapper[4829]: I0122 00:18:34.951468 4829 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3804ff49b1e73961f2c771ee87fe13c46b5176bd3a154144e471d8cc05540a36" Jan 22 00:18:34 crc kubenswrapper[4829]: I0122 00:18:34.966918 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h" Jan 22 00:18:35 crc kubenswrapper[4829]: I0122 00:18:35.057126 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0225cd53-73fa-4345-85b3-7de7de23f707-bundle\") pod \"0225cd53-73fa-4345-85b3-7de7de23f707\" (UID: \"0225cd53-73fa-4345-85b3-7de7de23f707\") " Jan 22 00:18:35 crc kubenswrapper[4829]: I0122 00:18:35.057260 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0225cd53-73fa-4345-85b3-7de7de23f707-util\") pod \"0225cd53-73fa-4345-85b3-7de7de23f707\" (UID: \"0225cd53-73fa-4345-85b3-7de7de23f707\") " Jan 22 00:18:35 crc kubenswrapper[4829]: I0122 00:18:35.057321 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfv5w\" (UniqueName: \"kubernetes.io/projected/0225cd53-73fa-4345-85b3-7de7de23f707-kube-api-access-cfv5w\") pod \"0225cd53-73fa-4345-85b3-7de7de23f707\" (UID: \"0225cd53-73fa-4345-85b3-7de7de23f707\") " Jan 22 00:18:35 crc kubenswrapper[4829]: I0122 00:18:35.058525 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0225cd53-73fa-4345-85b3-7de7de23f707-bundle" (OuterVolumeSpecName: "bundle") pod "0225cd53-73fa-4345-85b3-7de7de23f707" (UID: "0225cd53-73fa-4345-85b3-7de7de23f707"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:18:35 crc kubenswrapper[4829]: I0122 00:18:35.067927 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0225cd53-73fa-4345-85b3-7de7de23f707-kube-api-access-cfv5w" (OuterVolumeSpecName: "kube-api-access-cfv5w") pod "0225cd53-73fa-4345-85b3-7de7de23f707" (UID: "0225cd53-73fa-4345-85b3-7de7de23f707"). InnerVolumeSpecName "kube-api-access-cfv5w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:18:35 crc kubenswrapper[4829]: I0122 00:18:35.073748 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0225cd53-73fa-4345-85b3-7de7de23f707-util" (OuterVolumeSpecName: "util") pod "0225cd53-73fa-4345-85b3-7de7de23f707" (UID: "0225cd53-73fa-4345-85b3-7de7de23f707"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:18:35 crc kubenswrapper[4829]: I0122 00:18:35.159063 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfv5w\" (UniqueName: \"kubernetes.io/projected/0225cd53-73fa-4345-85b3-7de7de23f707-kube-api-access-cfv5w\") on node \"crc\" DevicePath \"\"" Jan 22 00:18:35 crc kubenswrapper[4829]: I0122 00:18:35.159346 4829 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/0225cd53-73fa-4345-85b3-7de7de23f707-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 00:18:35 crc kubenswrapper[4829]: I0122 00:18:35.159355 4829 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/0225cd53-73fa-4345-85b3-7de7de23f707-util\") on node \"crc\" DevicePath \"\"" Jan 22 00:18:35 crc kubenswrapper[4829]: I0122 00:18:35.961516 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h" Jan 22 00:18:35 crc kubenswrapper[4829]: I0122 00:18:35.962711 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elastic-operator-754d769d7d-ngl5t" event={"ID":"6e0a5463-b4ff-4f3c-96e8-a041af2f985c","Type":"ContainerStarted","Data":"eac224fd1212e0f56f49788a1084107be6302a5d45e089cacd4a5a6752a06339"} Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.000568 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/elastic-operator-754d769d7d-ngl5t" podStartSLOduration=13.800520771 podStartE2EDuration="17.000553052s" podCreationTimestamp="2026-01-22 00:18:19 +0000 UTC" firstStartedPulling="2026-01-22 00:18:31.779893422 +0000 UTC m=+689.816135334" lastFinishedPulling="2026-01-22 00:18:34.979925703 +0000 UTC m=+693.016167615" observedRunningTime="2026-01-22 00:18:35.998567368 +0000 UTC m=+694.034809290" watchObservedRunningTime="2026-01-22 00:18:36.000553052 +0000 UTC m=+694.036794974" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.577811 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Jan 22 00:18:36 crc kubenswrapper[4829]: E0122 00:18:36.578056 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0225cd53-73fa-4345-85b3-7de7de23f707" containerName="pull" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.578071 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="0225cd53-73fa-4345-85b3-7de7de23f707" containerName="pull" Jan 22 00:18:36 crc kubenswrapper[4829]: E0122 00:18:36.578085 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0225cd53-73fa-4345-85b3-7de7de23f707" containerName="util" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.578093 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="0225cd53-73fa-4345-85b3-7de7de23f707" containerName="util" Jan 22 00:18:36 crc kubenswrapper[4829]: E0122 00:18:36.578102 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0225cd53-73fa-4345-85b3-7de7de23f707" containerName="extract" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.578110 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="0225cd53-73fa-4345-85b3-7de7de23f707" containerName="extract" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.578205 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="0225cd53-73fa-4345-85b3-7de7de23f707" containerName="extract" Jan 22 00:18:36 crc 
kubenswrapper[4829]: I0122 00:18:36.579068 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.581959 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-remote-ca" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.582062 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-xpack-file-realm" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.582154 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-default-es-config" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.582230 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"elasticsearch-es-scripts" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.582284 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"elasticsearch-es-unicast-hosts" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.582344 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-http-certs-internal" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.582347 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-internal-users" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.582656 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-dockercfg-kn6ds" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.585688 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-default-es-transport-certs" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.598784 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.694861 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/39d65618-bed4-44b9-8893-94d1381c5421-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.694915 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/39d65618-bed4-44b9-8893-94d1381c5421-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.694940 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.695027 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: 
\"kubernetes.io/empty-dir/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.695051 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.695094 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.695117 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.695134 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/39d65618-bed4-44b9-8893-94d1381c5421-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.695264 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/39d65618-bed4-44b9-8893-94d1381c5421-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.695318 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.695355 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.695382 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.695489 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.695524 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.695576 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.796311 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.796376 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.796394 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.796431 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.796459 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp-volume\" (UniqueName: 
\"kubernetes.io/empty-dir/39d65618-bed4-44b9-8893-94d1381c5421-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.796485 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/39d65618-bed4-44b9-8893-94d1381c5421-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.796502 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.796525 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.796558 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.796580 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.796598 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.796614 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/39d65618-bed4-44b9-8893-94d1381c5421-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.796637 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/39d65618-bed4-44b9-8893-94d1381c5421-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: 
\"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.796652 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.796684 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.797516 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.797810 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.798093 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.798717 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/39d65618-bed4-44b9-8893-94d1381c5421-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.800917 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/39d65618-bed4-44b9-8893-94d1381c5421-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.801241 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 
00:18:36.801624 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/39d65618-bed4-44b9-8893-94d1381c5421-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.802214 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.803282 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.805254 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.805687 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.817304 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.820101 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.820702 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/39d65618-bed4-44b9-8893-94d1381c5421-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.823334 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/39d65618-bed4-44b9-8893-94d1381c5421-downward-api\") pod 
\"elasticsearch-es-default-0\" (UID: \"39d65618-bed4-44b9-8893-94d1381c5421\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:36 crc kubenswrapper[4829]: I0122 00:18:36.900452 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:18:37 crc kubenswrapper[4829]: I0122 00:18:37.199931 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Jan 22 00:18:37 crc kubenswrapper[4829]: I0122 00:18:37.973625 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"39d65618-bed4-44b9-8893-94d1381c5421","Type":"ContainerStarted","Data":"ca150e5156fc7060d336c75bbedec72cfe64df439e100b445382a1274d112c62"} Jan 22 00:18:43 crc kubenswrapper[4829]: I0122 00:18:43.026878 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/interconnect-operator-5bb49f789d-28w49" event={"ID":"e7f04bc7-8fec-45a1-8d01-cbff2015ceda","Type":"ContainerStarted","Data":"2efa4ec0b0ac7e31ad59345a885b1eaee07d5925e07dd53606cc7c19d24ff818"} Jan 22 00:18:43 crc kubenswrapper[4829]: I0122 00:18:43.048329 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/interconnect-operator-5bb49f789d-28w49" podStartSLOduration=8.50053826 podStartE2EDuration="19.048312151s" podCreationTimestamp="2026-01-22 00:18:24 +0000 UTC" firstStartedPulling="2026-01-22 00:18:31.79019951 +0000 UTC m=+689.826441432" lastFinishedPulling="2026-01-22 00:18:42.337973411 +0000 UTC m=+700.374215323" observedRunningTime="2026-01-22 00:18:43.044718277 +0000 UTC m=+701.080960269" watchObservedRunningTime="2026-01-22 00:18:43.048312151 +0000 UTC m=+701.084554083" Jan 22 00:18:50 crc kubenswrapper[4829]: I0122 00:18:50.949267 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5swgk"] Jan 22 00:18:50 crc kubenswrapper[4829]: I0122 00:18:50.950185 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5swgk" Jan 22 00:18:50 crc kubenswrapper[4829]: I0122 00:18:50.957100 4829 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-f88rf" Jan 22 00:18:50 crc kubenswrapper[4829]: I0122 00:18:50.957273 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Jan 22 00:18:50 crc kubenswrapper[4829]: I0122 00:18:50.957300 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Jan 22 00:18:50 crc kubenswrapper[4829]: I0122 00:18:50.985433 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5swgk"] Jan 22 00:18:51 crc kubenswrapper[4829]: I0122 00:18:51.022375 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/2d2c6a12-4219-4fd0-8d8d-596fa5ce01ea-tmp\") pod \"cert-manager-operator-controller-manager-5446d6888b-5swgk\" (UID: \"2d2c6a12-4219-4fd0-8d8d-596fa5ce01ea\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5swgk" Jan 22 00:18:51 crc kubenswrapper[4829]: I0122 00:18:51.022433 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8n2zh\" (UniqueName: \"kubernetes.io/projected/2d2c6a12-4219-4fd0-8d8d-596fa5ce01ea-kube-api-access-8n2zh\") pod \"cert-manager-operator-controller-manager-5446d6888b-5swgk\" (UID: \"2d2c6a12-4219-4fd0-8d8d-596fa5ce01ea\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5swgk" Jan 22 00:18:51 crc kubenswrapper[4829]: I0122 00:18:51.123337 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n2zh\" (UniqueName: \"kubernetes.io/projected/2d2c6a12-4219-4fd0-8d8d-596fa5ce01ea-kube-api-access-8n2zh\") pod \"cert-manager-operator-controller-manager-5446d6888b-5swgk\" (UID: \"2d2c6a12-4219-4fd0-8d8d-596fa5ce01ea\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5swgk" Jan 22 00:18:51 crc kubenswrapper[4829]: I0122 00:18:51.123428 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/2d2c6a12-4219-4fd0-8d8d-596fa5ce01ea-tmp\") pod \"cert-manager-operator-controller-manager-5446d6888b-5swgk\" (UID: \"2d2c6a12-4219-4fd0-8d8d-596fa5ce01ea\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5swgk" Jan 22 00:18:51 crc kubenswrapper[4829]: I0122 00:18:51.123960 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/2d2c6a12-4219-4fd0-8d8d-596fa5ce01ea-tmp\") pod \"cert-manager-operator-controller-manager-5446d6888b-5swgk\" (UID: \"2d2c6a12-4219-4fd0-8d8d-596fa5ce01ea\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5swgk" Jan 22 00:18:51 crc kubenswrapper[4829]: I0122 00:18:51.141238 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8n2zh\" (UniqueName: \"kubernetes.io/projected/2d2c6a12-4219-4fd0-8d8d-596fa5ce01ea-kube-api-access-8n2zh\") pod \"cert-manager-operator-controller-manager-5446d6888b-5swgk\" (UID: \"2d2c6a12-4219-4fd0-8d8d-596fa5ce01ea\") " 
pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5swgk" Jan 22 00:18:51 crc kubenswrapper[4829]: I0122 00:18:51.269517 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5swgk" Jan 22 00:18:54 crc kubenswrapper[4829]: I0122 00:18:54.727826 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5swgk"] Jan 22 00:18:54 crc kubenswrapper[4829]: W0122 00:18:54.741734 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2d2c6a12_4219_4fd0_8d8d_596fa5ce01ea.slice/crio-fcb480709fb0062e55b3a274008c9349cd79a5600cfed61f00fcec97b457a622 WatchSource:0}: Error finding container fcb480709fb0062e55b3a274008c9349cd79a5600cfed61f00fcec97b457a622: Status 404 returned error can't find the container with id fcb480709fb0062e55b3a274008c9349cd79a5600cfed61f00fcec97b457a622 Jan 22 00:18:55 crc kubenswrapper[4829]: I0122 00:18:55.119146 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"39d65618-bed4-44b9-8893-94d1381c5421","Type":"ContainerStarted","Data":"472e1e849f76a37b6f029cc040588d58e14a8fc6d194cd2a65fc9729cb96f486"} Jan 22 00:18:55 crc kubenswrapper[4829]: I0122 00:18:55.120525 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5swgk" event={"ID":"2d2c6a12-4219-4fd0-8d8d-596fa5ce01ea","Type":"ContainerStarted","Data":"fcb480709fb0062e55b3a274008c9349cd79a5600cfed61f00fcec97b457a622"} Jan 22 00:18:55 crc kubenswrapper[4829]: I0122 00:18:55.122022 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-992z9" event={"ID":"f8606d02-0830-4b52-80b0-5e30e6003f08","Type":"ContainerStarted","Data":"1c5585913da2707a1b2727090b8d8ffefeb2d05f8b860b652c5efe5269c669e8"} Jan 22 00:18:55 crc kubenswrapper[4829]: I0122 00:18:55.122225 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5bf474d74f-992z9" Jan 22 00:18:55 crc kubenswrapper[4829]: I0122 00:18:55.187058 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5bf474d74f-992z9" podStartSLOduration=1.716013389 podStartE2EDuration="39.187004553s" podCreationTimestamp="2026-01-22 00:18:16 +0000 UTC" firstStartedPulling="2026-01-22 00:18:16.955312605 +0000 UTC m=+674.991554517" lastFinishedPulling="2026-01-22 00:18:54.426303759 +0000 UTC m=+712.462545681" observedRunningTime="2026-01-22 00:18:55.182569992 +0000 UTC m=+713.218811914" watchObservedRunningTime="2026-01-22 00:18:55.187004553 +0000 UTC m=+713.223246465" Jan 22 00:18:55 crc kubenswrapper[4829]: I0122 00:18:55.272074 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Jan 22 00:18:55 crc kubenswrapper[4829]: I0122 00:18:55.314458 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Jan 22 00:18:57 crc kubenswrapper[4829]: I0122 00:18:57.143463 4829 generic.go:334] "Generic (PLEG): container finished" podID="39d65618-bed4-44b9-8893-94d1381c5421" containerID="472e1e849f76a37b6f029cc040588d58e14a8fc6d194cd2a65fc9729cb96f486" exitCode=0 Jan 22 00:18:57 crc kubenswrapper[4829]: I0122 00:18:57.143501 4829 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"39d65618-bed4-44b9-8893-94d1381c5421","Type":"ContainerDied","Data":"472e1e849f76a37b6f029cc040588d58e14a8fc6d194cd2a65fc9729cb96f486"} Jan 22 00:18:58 crc kubenswrapper[4829]: I0122 00:18:58.152674 4829 generic.go:334] "Generic (PLEG): container finished" podID="39d65618-bed4-44b9-8893-94d1381c5421" containerID="ed3d5408358ff505ce33929e7f8a47f40d7e07392dc8ca5e64a367a3a6b040ae" exitCode=0 Jan 22 00:18:58 crc kubenswrapper[4829]: I0122 00:18:58.153065 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"39d65618-bed4-44b9-8893-94d1381c5421","Type":"ContainerDied","Data":"ed3d5408358ff505ce33929e7f8a47f40d7e07392dc8ca5e64a367a3a6b040ae"} Jan 22 00:19:04 crc kubenswrapper[4829]: I0122 00:19:04.659017 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:19:04 crc kubenswrapper[4829]: I0122 00:19:04.659673 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:19:06 crc kubenswrapper[4829]: I0122 00:19:06.714466 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5bf474d74f-992z9" Jan 22 00:19:08 crc kubenswrapper[4829]: E0122 00:19:08.312583 4829 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cert-manager/cert-manager-operator-rhel9@sha256:fa8de363ab4435c1085ac37f1bad488828c6ae8ba361c5f865c27ef577610911" Jan 22 00:19:08 crc kubenswrapper[4829]: E0122 00:19:08.313049 4829 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cert-manager-operator,Image:registry.redhat.io/cert-manager/cert-manager-operator-rhel9@sha256:fa8de363ab4435c1085ac37f1bad488828c6ae8ba361c5f865c27ef577610911,Command:[/usr/bin/cert-manager-operator],Args:[start --v=$(OPERATOR_LOG_LEVEL) --trusted-ca-configmap=$(TRUSTED_CA_CONFIGMAP_NAME) --cloud-credentials-secret=$(CLOUD_CREDENTIALS_SECRET_NAME) 
--unsupported-addon-features=$(UNSUPPORTED_ADDON_FEATURES)],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:WATCH_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.annotations['olm.targetNamespaces'],},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:OPERATOR_NAME,Value:cert-manager-operator,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CERT_MANAGER_WEBHOOK,Value:registry.redhat.io/cert-manager/jetstack-cert-manager-rhel9@sha256:29a0fa1c2f2a6cee62a0468a3883d16d491b4af29130dad6e3e2bb2948f274df,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CERT_MANAGER_CA_INJECTOR,Value:registry.redhat.io/cert-manager/jetstack-cert-manager-rhel9@sha256:29a0fa1c2f2a6cee62a0468a3883d16d491b4af29130dad6e3e2bb2948f274df,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CERT_MANAGER_CONTROLLER,Value:registry.redhat.io/cert-manager/jetstack-cert-manager-rhel9@sha256:29a0fa1c2f2a6cee62a0468a3883d16d491b4af29130dad6e3e2bb2948f274df,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CERT_MANAGER_ACMESOLVER,Value:registry.redhat.io/cert-manager/jetstack-cert-manager-acmesolver-rhel9@sha256:ba937fc4b9eee31422914352c11a45b90754ba4fbe490ea45249b90afdc4e0a7,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CERT_MANAGER_ISTIOCSR,Value:registry.redhat.io/cert-manager/cert-manager-istio-csr-rhel9@sha256:af1ac813b8ee414ef215936f05197bc498bccbd540f3e2a93cb522221ba112bc,ValueFrom:nil,},EnvVar{Name:OPERAND_IMAGE_VERSION,Value:1.18.3,ValueFrom:nil,},EnvVar{Name:ISTIOCSR_OPERAND_IMAGE_VERSION,Value:0.14.2,ValueFrom:nil,},EnvVar{Name:OPERATOR_IMAGE_VERSION,Value:1.18.0,ValueFrom:nil,},EnvVar{Name:OPERATOR_LOG_LEVEL,Value:2,ValueFrom:nil,},EnvVar{Name:TRUSTED_CA_CONFIGMAP_NAME,Value:,ValueFrom:nil,},EnvVar{Name:CLOUD_CREDENTIALS_SECRET_NAME,Value:,ValueFrom:nil,},EnvVar{Name:UNSUPPORTED_ADDON_FEATURES,Value:,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cert-manager-operator.v1.18.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{33554432 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:tmp,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8n2zh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:*false,SELinuxOptions:nil,RunAsUser:*1000680000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cert-manager-operator-controller-manager-5446d6888b-5swgk_cert-manager-operator(2d2c6a12-4219-4fd0-8d8d-596fa5ce01ea): 
ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 00:19:08 crc kubenswrapper[4829]: E0122 00:19:08.314295 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cert-manager-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5swgk" podUID="2d2c6a12-4219-4fd0-8d8d-596fa5ce01ea" Jan 22 00:19:09 crc kubenswrapper[4829]: I0122 00:19:09.242553 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"39d65618-bed4-44b9-8893-94d1381c5421","Type":"ContainerStarted","Data":"b15f7e222df3953d41463a9d664d0ef6330e17fd0ab377f3fc0b9e5fc6ec04c5"} Jan 22 00:19:09 crc kubenswrapper[4829]: I0122 00:19:09.242874 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:19:09 crc kubenswrapper[4829]: E0122 00:19:09.243371 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cert-manager-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cert-manager/cert-manager-operator-rhel9@sha256:fa8de363ab4435c1085ac37f1bad488828c6ae8ba361c5f865c27ef577610911\\\"\"" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5swgk" podUID="2d2c6a12-4219-4fd0-8d8d-596fa5ce01ea" Jan 22 00:19:09 crc kubenswrapper[4829]: I0122 00:19:09.354445 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/elasticsearch-es-default-0" podStartSLOduration=15.958513222 podStartE2EDuration="33.354423499s" podCreationTimestamp="2026-01-22 00:18:36 +0000 UTC" firstStartedPulling="2026-01-22 00:18:37.215721071 +0000 UTC m=+695.251963003" lastFinishedPulling="2026-01-22 00:18:54.611631378 +0000 UTC m=+712.647873280" observedRunningTime="2026-01-22 00:19:09.35216542 +0000 UTC m=+727.388407322" watchObservedRunningTime="2026-01-22 00:19:09.354423499 +0000 UTC m=+727.390665411" Jan 22 00:19:13 crc kubenswrapper[4829]: I0122 00:19:13.883460 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Jan 22 00:19:13 crc kubenswrapper[4829]: I0122 00:19:13.884892 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:13 crc kubenswrapper[4829]: I0122 00:19:13.886766 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-1-sys-config" Jan 22 00:19:13 crc kubenswrapper[4829]: I0122 00:19:13.886783 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-1-global-ca" Jan 22 00:19:13 crc kubenswrapper[4829]: I0122 00:19:13.887051 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-1-ca" Jan 22 00:19:13 crc kubenswrapper[4829]: I0122 00:19:13.887263 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-ll9bh" Jan 22 00:19:13 crc kubenswrapper[4829]: I0122 00:19:13.902940 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Jan 22 00:19:13 crc kubenswrapper[4829]: I0122 00:19:13.949250 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/08e5360b-ccc3-40a1-a58a-568002f1317c-container-storage-root\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:13 crc kubenswrapper[4829]: I0122 00:19:13.949300 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/08e5360b-ccc3-40a1-a58a-568002f1317c-build-system-configs\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:13 crc kubenswrapper[4829]: I0122 00:19:13.949328 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/08e5360b-ccc3-40a1-a58a-568002f1317c-build-blob-cache\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:13 crc kubenswrapper[4829]: I0122 00:19:13.949348 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/08e5360b-ccc3-40a1-a58a-568002f1317c-node-pullsecrets\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:13 crc kubenswrapper[4829]: I0122 00:19:13.949370 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/08e5360b-ccc3-40a1-a58a-568002f1317c-buildcachedir\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:13 crc kubenswrapper[4829]: I0122 00:19:13.949496 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/08e5360b-ccc3-40a1-a58a-568002f1317c-build-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: 
\"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:13 crc kubenswrapper[4829]: I0122 00:19:13.949684 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/08e5360b-ccc3-40a1-a58a-568002f1317c-builder-dockercfg-ll9bh-push\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:13 crc kubenswrapper[4829]: I0122 00:19:13.949715 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/08e5360b-ccc3-40a1-a58a-568002f1317c-build-proxy-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:13 crc kubenswrapper[4829]: I0122 00:19:13.949749 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/08e5360b-ccc3-40a1-a58a-568002f1317c-buildworkdir\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:13 crc kubenswrapper[4829]: I0122 00:19:13.949779 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/08e5360b-ccc3-40a1-a58a-568002f1317c-container-storage-run\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:13 crc kubenswrapper[4829]: I0122 00:19:13.949929 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/08e5360b-ccc3-40a1-a58a-568002f1317c-builder-dockercfg-ll9bh-pull\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:13 crc kubenswrapper[4829]: I0122 00:19:13.949984 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnjfn\" (UniqueName: \"kubernetes.io/projected/08e5360b-ccc3-40a1-a58a-568002f1317c-kube-api-access-rnjfn\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:14 crc kubenswrapper[4829]: I0122 00:19:14.051062 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/08e5360b-ccc3-40a1-a58a-568002f1317c-container-storage-root\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:14 crc kubenswrapper[4829]: I0122 00:19:14.051123 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/08e5360b-ccc3-40a1-a58a-568002f1317c-build-system-configs\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " 
pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:14 crc kubenswrapper[4829]: I0122 00:19:14.051153 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/08e5360b-ccc3-40a1-a58a-568002f1317c-build-blob-cache\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:14 crc kubenswrapper[4829]: I0122 00:19:14.051181 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/08e5360b-ccc3-40a1-a58a-568002f1317c-node-pullsecrets\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:14 crc kubenswrapper[4829]: I0122 00:19:14.051217 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/08e5360b-ccc3-40a1-a58a-568002f1317c-buildcachedir\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:14 crc kubenswrapper[4829]: I0122 00:19:14.051242 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/08e5360b-ccc3-40a1-a58a-568002f1317c-build-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:14 crc kubenswrapper[4829]: I0122 00:19:14.051302 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/08e5360b-ccc3-40a1-a58a-568002f1317c-builder-dockercfg-ll9bh-push\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:14 crc kubenswrapper[4829]: I0122 00:19:14.051331 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/08e5360b-ccc3-40a1-a58a-568002f1317c-build-proxy-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:14 crc kubenswrapper[4829]: I0122 00:19:14.051334 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/08e5360b-ccc3-40a1-a58a-568002f1317c-node-pullsecrets\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:14 crc kubenswrapper[4829]: I0122 00:19:14.051343 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/08e5360b-ccc3-40a1-a58a-568002f1317c-buildcachedir\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:14 crc kubenswrapper[4829]: I0122 00:19:14.051363 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: 
\"kubernetes.io/empty-dir/08e5360b-ccc3-40a1-a58a-568002f1317c-buildworkdir\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:14 crc kubenswrapper[4829]: I0122 00:19:14.051510 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/08e5360b-ccc3-40a1-a58a-568002f1317c-container-storage-run\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:14 crc kubenswrapper[4829]: I0122 00:19:14.051600 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/08e5360b-ccc3-40a1-a58a-568002f1317c-builder-dockercfg-ll9bh-pull\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:14 crc kubenswrapper[4829]: I0122 00:19:14.051619 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/08e5360b-ccc3-40a1-a58a-568002f1317c-container-storage-root\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:14 crc kubenswrapper[4829]: I0122 00:19:14.051637 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnjfn\" (UniqueName: \"kubernetes.io/projected/08e5360b-ccc3-40a1-a58a-568002f1317c-kube-api-access-rnjfn\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:14 crc kubenswrapper[4829]: I0122 00:19:14.051792 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/08e5360b-ccc3-40a1-a58a-568002f1317c-buildworkdir\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:14 crc kubenswrapper[4829]: I0122 00:19:14.051855 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/08e5360b-ccc3-40a1-a58a-568002f1317c-build-blob-cache\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:14 crc kubenswrapper[4829]: I0122 00:19:14.052021 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/08e5360b-ccc3-40a1-a58a-568002f1317c-container-storage-run\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:14 crc kubenswrapper[4829]: I0122 00:19:14.052247 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/08e5360b-ccc3-40a1-a58a-568002f1317c-build-system-configs\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " 
pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:14 crc kubenswrapper[4829]: I0122 00:19:14.052301 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/08e5360b-ccc3-40a1-a58a-568002f1317c-build-proxy-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:14 crc kubenswrapper[4829]: I0122 00:19:14.052348 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/08e5360b-ccc3-40a1-a58a-568002f1317c-build-ca-bundles\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:14 crc kubenswrapper[4829]: I0122 00:19:14.057064 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/08e5360b-ccc3-40a1-a58a-568002f1317c-builder-dockercfg-ll9bh-push\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:14 crc kubenswrapper[4829]: I0122 00:19:14.057553 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/08e5360b-ccc3-40a1-a58a-568002f1317c-builder-dockercfg-ll9bh-pull\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:14 crc kubenswrapper[4829]: I0122 00:19:14.067491 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnjfn\" (UniqueName: \"kubernetes.io/projected/08e5360b-ccc3-40a1-a58a-568002f1317c-kube-api-access-rnjfn\") pod \"service-telemetry-operator-1-build\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:14 crc kubenswrapper[4829]: I0122 00:19:14.197925 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:14 crc kubenswrapper[4829]: I0122 00:19:14.700534 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Jan 22 00:19:14 crc kubenswrapper[4829]: W0122 00:19:14.715718 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08e5360b_ccc3_40a1_a58a_568002f1317c.slice/crio-40cfe60d95a8738cf92fa4e49c4a6260464e7a937a9b3c7bb1e09905df0f7c31 WatchSource:0}: Error finding container 40cfe60d95a8738cf92fa4e49c4a6260464e7a937a9b3c7bb1e09905df0f7c31: Status 404 returned error can't find the container with id 40cfe60d95a8738cf92fa4e49c4a6260464e7a937a9b3c7bb1e09905df0f7c31 Jan 22 00:19:15 crc kubenswrapper[4829]: I0122 00:19:15.280765 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-1-build" event={"ID":"08e5360b-ccc3-40a1-a58a-568002f1317c","Type":"ContainerStarted","Data":"40cfe60d95a8738cf92fa4e49c4a6260464e7a937a9b3c7bb1e09905df0f7c31"} Jan 22 00:19:21 crc kubenswrapper[4829]: I0122 00:19:21.394304 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-1-build" event={"ID":"08e5360b-ccc3-40a1-a58a-568002f1317c","Type":"ContainerStarted","Data":"7ce6a6b4a665d655b93c979001415c9308d9c5b3995ab54b9684d22f004945b8"} Jan 22 00:19:21 crc kubenswrapper[4829]: I0122 00:19:21.999135 4829 prober.go:107] "Probe failed" probeType="Readiness" pod="service-telemetry/elasticsearch-es-default-0" podUID="39d65618-bed4-44b9-8893-94d1381c5421" containerName="elasticsearch" probeResult="failure" output=< Jan 22 00:19:21 crc kubenswrapper[4829]: {"timestamp": "2026-01-22T00:19:21+00:00", "message": "readiness probe failed", "curl_rc": "7"} Jan 22 00:19:21 crc kubenswrapper[4829]: > Jan 22 00:19:22 crc kubenswrapper[4829]: I0122 00:19:22.401844 4829 generic.go:334] "Generic (PLEG): container finished" podID="08e5360b-ccc3-40a1-a58a-568002f1317c" containerID="7ce6a6b4a665d655b93c979001415c9308d9c5b3995ab54b9684d22f004945b8" exitCode=0 Jan 22 00:19:22 crc kubenswrapper[4829]: I0122 00:19:22.401896 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-1-build" event={"ID":"08e5360b-ccc3-40a1-a58a-568002f1317c","Type":"ContainerDied","Data":"7ce6a6b4a665d655b93c979001415c9308d9c5b3995ab54b9684d22f004945b8"} Jan 22 00:19:23 crc kubenswrapper[4829]: I0122 00:19:23.409644 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-1-build" event={"ID":"08e5360b-ccc3-40a1-a58a-568002f1317c","Type":"ContainerStarted","Data":"09ade97a7baa722ea9894a25ea514de92d4523b318f86f9c93f5af29d6543d45"} Jan 22 00:19:23 crc kubenswrapper[4829]: I0122 00:19:23.443694 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-operator-1-build" podStartSLOduration=4.183345096 podStartE2EDuration="10.443674262s" podCreationTimestamp="2026-01-22 00:19:13 +0000 UTC" firstStartedPulling="2026-01-22 00:19:14.717253374 +0000 UTC m=+732.753495286" lastFinishedPulling="2026-01-22 00:19:20.97758254 +0000 UTC m=+739.013824452" observedRunningTime="2026-01-22 00:19:23.440383677 +0000 UTC m=+741.476625619" watchObservedRunningTime="2026-01-22 00:19:23.443674262 +0000 UTC m=+741.479916174" Jan 22 00:19:24 crc kubenswrapper[4829]: I0122 00:19:24.285115 4829 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Jan 22 00:19:25 crc kubenswrapper[4829]: I0122 00:19:25.422460 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/service-telemetry-operator-1-build" podUID="08e5360b-ccc3-40a1-a58a-568002f1317c" containerName="docker-build" containerID="cri-o://09ade97a7baa722ea9894a25ea514de92d4523b318f86f9c93f5af29d6543d45" gracePeriod=30 Jan 22 00:19:25 crc kubenswrapper[4829]: I0122 00:19:25.947509 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-2-build"] Jan 22 00:19:25 crc kubenswrapper[4829]: I0122 00:19:25.948604 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:25 crc kubenswrapper[4829]: I0122 00:19:25.953021 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-2-ca" Jan 22 00:19:25 crc kubenswrapper[4829]: I0122 00:19:25.953234 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-2-sys-config" Jan 22 00:19:25 crc kubenswrapper[4829]: I0122 00:19:25.953399 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-2-global-ca" Jan 22 00:19:25 crc kubenswrapper[4829]: I0122 00:19:25.971573 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-2-build"] Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.077612 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/8bb811d9-cffa-4113-a542-00ecedd4efbc-container-storage-run\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.077704 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/8bb811d9-cffa-4113-a542-00ecedd4efbc-build-system-configs\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.077832 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/8bb811d9-cffa-4113-a542-00ecedd4efbc-node-pullsecrets\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.077936 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/8bb811d9-cffa-4113-a542-00ecedd4efbc-buildworkdir\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.077971 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: 
\"kubernetes.io/host-path/8bb811d9-cffa-4113-a542-00ecedd4efbc-buildcachedir\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.077995 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/8bb811d9-cffa-4113-a542-00ecedd4efbc-container-storage-root\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.078020 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/8bb811d9-cffa-4113-a542-00ecedd4efbc-build-blob-cache\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.078099 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/8bb811d9-cffa-4113-a542-00ecedd4efbc-builder-dockercfg-ll9bh-pull\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.078171 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8bb811d9-cffa-4113-a542-00ecedd4efbc-build-proxy-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.078194 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwvhx\" (UniqueName: \"kubernetes.io/projected/8bb811d9-cffa-4113-a542-00ecedd4efbc-kube-api-access-zwvhx\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.078253 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/8bb811d9-cffa-4113-a542-00ecedd4efbc-builder-dockercfg-ll9bh-push\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.078306 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8bb811d9-cffa-4113-a542-00ecedd4efbc-build-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.179921 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: 
\"kubernetes.io/host-path/8bb811d9-cffa-4113-a542-00ecedd4efbc-node-pullsecrets\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.179991 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/8bb811d9-cffa-4113-a542-00ecedd4efbc-buildworkdir\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.180038 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/8bb811d9-cffa-4113-a542-00ecedd4efbc-buildcachedir\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.180074 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/8bb811d9-cffa-4113-a542-00ecedd4efbc-container-storage-root\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.180109 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/8bb811d9-cffa-4113-a542-00ecedd4efbc-build-blob-cache\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.180150 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/8bb811d9-cffa-4113-a542-00ecedd4efbc-builder-dockercfg-ll9bh-pull\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.180170 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/8bb811d9-cffa-4113-a542-00ecedd4efbc-buildcachedir\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.180505 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/8bb811d9-cffa-4113-a542-00ecedd4efbc-buildworkdir\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.180183 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/8bb811d9-cffa-4113-a542-00ecedd4efbc-node-pullsecrets\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc 
kubenswrapper[4829]: I0122 00:19:26.180185 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8bb811d9-cffa-4113-a542-00ecedd4efbc-build-proxy-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.180610 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwvhx\" (UniqueName: \"kubernetes.io/projected/8bb811d9-cffa-4113-a542-00ecedd4efbc-kube-api-access-zwvhx\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.180651 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/8bb811d9-cffa-4113-a542-00ecedd4efbc-builder-dockercfg-ll9bh-push\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.180669 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/8bb811d9-cffa-4113-a542-00ecedd4efbc-build-blob-cache\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.180682 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8bb811d9-cffa-4113-a542-00ecedd4efbc-build-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.180871 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/8bb811d9-cffa-4113-a542-00ecedd4efbc-container-storage-run\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.180963 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/8bb811d9-cffa-4113-a542-00ecedd4efbc-build-system-configs\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.180990 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/8bb811d9-cffa-4113-a542-00ecedd4efbc-container-storage-root\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.181459 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: 
\"kubernetes.io/empty-dir/8bb811d9-cffa-4113-a542-00ecedd4efbc-container-storage-run\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.181503 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8bb811d9-cffa-4113-a542-00ecedd4efbc-build-proxy-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.181527 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/8bb811d9-cffa-4113-a542-00ecedd4efbc-build-system-configs\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.181750 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8bb811d9-cffa-4113-a542-00ecedd4efbc-build-ca-bundles\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.189087 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/8bb811d9-cffa-4113-a542-00ecedd4efbc-builder-dockercfg-ll9bh-push\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.197503 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/8bb811d9-cffa-4113-a542-00ecedd4efbc-builder-dockercfg-ll9bh-pull\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.210678 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwvhx\" (UniqueName: \"kubernetes.io/projected/8bb811d9-cffa-4113-a542-00ecedd4efbc-kube-api-access-zwvhx\") pod \"service-telemetry-operator-2-build\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.264632 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:26 crc kubenswrapper[4829]: I0122 00:19:26.533983 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-2-build"] Jan 22 00:19:26 crc kubenswrapper[4829]: W0122 00:19:26.542413 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8bb811d9_cffa_4113_a542_00ecedd4efbc.slice/crio-4b6fdb7e4a211c2e670ad32448347de60a9d5ae9aeb3e7ef41e034084e43b699 WatchSource:0}: Error finding container 4b6fdb7e4a211c2e670ad32448347de60a9d5ae9aeb3e7ef41e034084e43b699: Status 404 returned error can't find the container with id 4b6fdb7e4a211c2e670ad32448347de60a9d5ae9aeb3e7ef41e034084e43b699 Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.220071 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/elasticsearch-es-default-0" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.463659 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-1-build_08e5360b-ccc3-40a1-a58a-568002f1317c/docker-build/0.log" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.464778 4829 generic.go:334] "Generic (PLEG): container finished" podID="08e5360b-ccc3-40a1-a58a-568002f1317c" containerID="09ade97a7baa722ea9894a25ea514de92d4523b318f86f9c93f5af29d6543d45" exitCode=1 Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.464868 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-1-build" event={"ID":"08e5360b-ccc3-40a1-a58a-568002f1317c","Type":"ContainerDied","Data":"09ade97a7baa722ea9894a25ea514de92d4523b318f86f9c93f5af29d6543d45"} Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.468385 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"8bb811d9-cffa-4113-a542-00ecedd4efbc","Type":"ContainerStarted","Data":"d134fc9df2fec84a3c849fa5ec0edbe469bf6f6de3b4b347ca40df3a91087012"} Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.468438 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"8bb811d9-cffa-4113-a542-00ecedd4efbc","Type":"ContainerStarted","Data":"4b6fdb7e4a211c2e670ad32448347de60a9d5ae9aeb3e7ef41e034084e43b699"} Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.471095 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5swgk" event={"ID":"2d2c6a12-4219-4fd0-8d8d-596fa5ce01ea","Type":"ContainerStarted","Data":"609adc68ac2763645f8d02f2ce786a2a299ea3ef911f06e5c2f4f12f64de4abe"} Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.527078 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5swgk" podStartSLOduration=5.425011859 podStartE2EDuration="37.527055989s" podCreationTimestamp="2026-01-22 00:18:50 +0000 UTC" firstStartedPulling="2026-01-22 00:18:54.744452886 +0000 UTC m=+712.780694798" lastFinishedPulling="2026-01-22 00:19:26.846496966 +0000 UTC m=+744.882738928" observedRunningTime="2026-01-22 00:19:27.516860599 +0000 UTC m=+745.553102521" watchObservedRunningTime="2026-01-22 00:19:27.527055989 +0000 UTC m=+745.563297901" Jan 22 00:19:27 crc kubenswrapper[4829]: E0122 00:19:27.541293 
4829 server.go:309] "Unable to authenticate the request due to an error" err="verifying certificate SN=6956309506909475776, SKID=, AKID=58:29:81:64:0A:06:19:68:BE:C7:23:6D:77:93:1F:D0:C2:75:D5:20 failed: x509: certificate signed by unknown authority" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.623448 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-1-build_08e5360b-ccc3-40a1-a58a-568002f1317c/docker-build/0.log" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.623878 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.805138 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/08e5360b-ccc3-40a1-a58a-568002f1317c-builder-dockercfg-ll9bh-push\") pod \"08e5360b-ccc3-40a1-a58a-568002f1317c\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.805193 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/08e5360b-ccc3-40a1-a58a-568002f1317c-builder-dockercfg-ll9bh-pull\") pod \"08e5360b-ccc3-40a1-a58a-568002f1317c\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.805226 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnjfn\" (UniqueName: \"kubernetes.io/projected/08e5360b-ccc3-40a1-a58a-568002f1317c-kube-api-access-rnjfn\") pod \"08e5360b-ccc3-40a1-a58a-568002f1317c\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.805260 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/08e5360b-ccc3-40a1-a58a-568002f1317c-node-pullsecrets\") pod \"08e5360b-ccc3-40a1-a58a-568002f1317c\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.805297 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/08e5360b-ccc3-40a1-a58a-568002f1317c-build-proxy-ca-bundles\") pod \"08e5360b-ccc3-40a1-a58a-568002f1317c\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.805318 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/08e5360b-ccc3-40a1-a58a-568002f1317c-build-system-configs\") pod \"08e5360b-ccc3-40a1-a58a-568002f1317c\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.805344 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/08e5360b-ccc3-40a1-a58a-568002f1317c-container-storage-run\") pod \"08e5360b-ccc3-40a1-a58a-568002f1317c\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.805359 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/08e5360b-ccc3-40a1-a58a-568002f1317c-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod 
"08e5360b-ccc3-40a1-a58a-568002f1317c" (UID: "08e5360b-ccc3-40a1-a58a-568002f1317c"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.805418 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/08e5360b-ccc3-40a1-a58a-568002f1317c-build-blob-cache\") pod \"08e5360b-ccc3-40a1-a58a-568002f1317c\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.805441 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/08e5360b-ccc3-40a1-a58a-568002f1317c-buildcachedir\") pod \"08e5360b-ccc3-40a1-a58a-568002f1317c\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.805465 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/08e5360b-ccc3-40a1-a58a-568002f1317c-build-ca-bundles\") pod \"08e5360b-ccc3-40a1-a58a-568002f1317c\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.805519 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/08e5360b-ccc3-40a1-a58a-568002f1317c-container-storage-root\") pod \"08e5360b-ccc3-40a1-a58a-568002f1317c\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.805620 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/08e5360b-ccc3-40a1-a58a-568002f1317c-buildworkdir\") pod \"08e5360b-ccc3-40a1-a58a-568002f1317c\" (UID: \"08e5360b-ccc3-40a1-a58a-568002f1317c\") " Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.805870 4829 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/08e5360b-ccc3-40a1-a58a-568002f1317c-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.806134 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/08e5360b-ccc3-40a1-a58a-568002f1317c-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "08e5360b-ccc3-40a1-a58a-568002f1317c" (UID: "08e5360b-ccc3-40a1-a58a-568002f1317c"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.806573 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08e5360b-ccc3-40a1-a58a-568002f1317c-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "08e5360b-ccc3-40a1-a58a-568002f1317c" (UID: "08e5360b-ccc3-40a1-a58a-568002f1317c"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.806760 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08e5360b-ccc3-40a1-a58a-568002f1317c-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "08e5360b-ccc3-40a1-a58a-568002f1317c" (UID: "08e5360b-ccc3-40a1-a58a-568002f1317c"). InnerVolumeSpecName "container-storage-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.806851 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/08e5360b-ccc3-40a1-a58a-568002f1317c-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "08e5360b-ccc3-40a1-a58a-568002f1317c" (UID: "08e5360b-ccc3-40a1-a58a-568002f1317c"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.807237 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08e5360b-ccc3-40a1-a58a-568002f1317c-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "08e5360b-ccc3-40a1-a58a-568002f1317c" (UID: "08e5360b-ccc3-40a1-a58a-568002f1317c"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.808127 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08e5360b-ccc3-40a1-a58a-568002f1317c-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "08e5360b-ccc3-40a1-a58a-568002f1317c" (UID: "08e5360b-ccc3-40a1-a58a-568002f1317c"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.808275 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/08e5360b-ccc3-40a1-a58a-568002f1317c-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "08e5360b-ccc3-40a1-a58a-568002f1317c" (UID: "08e5360b-ccc3-40a1-a58a-568002f1317c"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.808533 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/08e5360b-ccc3-40a1-a58a-568002f1317c-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "08e5360b-ccc3-40a1-a58a-568002f1317c" (UID: "08e5360b-ccc3-40a1-a58a-568002f1317c"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.811376 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08e5360b-ccc3-40a1-a58a-568002f1317c-kube-api-access-rnjfn" (OuterVolumeSpecName: "kube-api-access-rnjfn") pod "08e5360b-ccc3-40a1-a58a-568002f1317c" (UID: "08e5360b-ccc3-40a1-a58a-568002f1317c"). InnerVolumeSpecName "kube-api-access-rnjfn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.817605 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08e5360b-ccc3-40a1-a58a-568002f1317c-builder-dockercfg-ll9bh-push" (OuterVolumeSpecName: "builder-dockercfg-ll9bh-push") pod "08e5360b-ccc3-40a1-a58a-568002f1317c" (UID: "08e5360b-ccc3-40a1-a58a-568002f1317c"). InnerVolumeSpecName "builder-dockercfg-ll9bh-push". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.817688 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08e5360b-ccc3-40a1-a58a-568002f1317c-builder-dockercfg-ll9bh-pull" (OuterVolumeSpecName: "builder-dockercfg-ll9bh-pull") pod "08e5360b-ccc3-40a1-a58a-568002f1317c" (UID: "08e5360b-ccc3-40a1-a58a-568002f1317c"). InnerVolumeSpecName "builder-dockercfg-ll9bh-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.907636 4829 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/08e5360b-ccc3-40a1-a58a-568002f1317c-build-blob-cache\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.907662 4829 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/08e5360b-ccc3-40a1-a58a-568002f1317c-buildcachedir\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.907671 4829 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/08e5360b-ccc3-40a1-a58a-568002f1317c-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.907679 4829 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/08e5360b-ccc3-40a1-a58a-568002f1317c-container-storage-root\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.907689 4829 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/08e5360b-ccc3-40a1-a58a-568002f1317c-buildworkdir\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.907698 4829 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/08e5360b-ccc3-40a1-a58a-568002f1317c-builder-dockercfg-ll9bh-push\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.907706 4829 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/08e5360b-ccc3-40a1-a58a-568002f1317c-builder-dockercfg-ll9bh-pull\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.907716 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnjfn\" (UniqueName: \"kubernetes.io/projected/08e5360b-ccc3-40a1-a58a-568002f1317c-kube-api-access-rnjfn\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.907723 4829 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/08e5360b-ccc3-40a1-a58a-568002f1317c-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.907732 4829 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/08e5360b-ccc3-40a1-a58a-568002f1317c-build-system-configs\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:27 crc kubenswrapper[4829]: I0122 00:19:27.907740 4829 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: 
\"kubernetes.io/empty-dir/08e5360b-ccc3-40a1-a58a-568002f1317c-container-storage-run\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:28 crc kubenswrapper[4829]: I0122 00:19:28.478580 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-1-build_08e5360b-ccc3-40a1-a58a-568002f1317c/docker-build/0.log" Jan 22 00:19:28 crc kubenswrapper[4829]: I0122 00:19:28.479019 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-1-build" event={"ID":"08e5360b-ccc3-40a1-a58a-568002f1317c","Type":"ContainerDied","Data":"40cfe60d95a8738cf92fa4e49c4a6260464e7a937a9b3c7bb1e09905df0f7c31"} Jan 22 00:19:28 crc kubenswrapper[4829]: I0122 00:19:28.479084 4829 scope.go:117] "RemoveContainer" containerID="09ade97a7baa722ea9894a25ea514de92d4523b318f86f9c93f5af29d6543d45" Jan 22 00:19:28 crc kubenswrapper[4829]: I0122 00:19:28.479048 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-1-build" Jan 22 00:19:28 crc kubenswrapper[4829]: I0122 00:19:28.506516 4829 scope.go:117] "RemoveContainer" containerID="7ce6a6b4a665d655b93c979001415c9308d9c5b3995ab54b9684d22f004945b8" Jan 22 00:19:28 crc kubenswrapper[4829]: I0122 00:19:28.536643 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Jan 22 00:19:28 crc kubenswrapper[4829]: I0122 00:19:28.552647 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/service-telemetry-operator-1-build"] Jan 22 00:19:28 crc kubenswrapper[4829]: I0122 00:19:28.571411 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08e5360b-ccc3-40a1-a58a-568002f1317c" path="/var/lib/kubelet/pods/08e5360b-ccc3-40a1-a58a-568002f1317c/volumes" Jan 22 00:19:28 crc kubenswrapper[4829]: I0122 00:19:28.581726 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-operator-2-build"] Jan 22 00:19:29 crc kubenswrapper[4829]: I0122 00:19:29.485591 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/service-telemetry-operator-2-build" podUID="8bb811d9-cffa-4113-a542-00ecedd4efbc" containerName="git-clone" containerID="cri-o://d134fc9df2fec84a3c849fa5ec0edbe469bf6f6de3b4b347ca40df3a91087012" gracePeriod=30 Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:29.874848 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-2-build_8bb811d9-cffa-4113-a542-00ecedd4efbc/git-clone/0.log" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:29.874922 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.157792 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-vwmph"] Jan 22 00:19:30 crc kubenswrapper[4829]: E0122 00:19:30.158172 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08e5360b-ccc3-40a1-a58a-568002f1317c" containerName="docker-build" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.158210 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="08e5360b-ccc3-40a1-a58a-568002f1317c" containerName="docker-build" Jan 22 00:19:30 crc kubenswrapper[4829]: E0122 00:19:30.158223 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08e5360b-ccc3-40a1-a58a-568002f1317c" containerName="manage-dockerfile" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.158230 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="08e5360b-ccc3-40a1-a58a-568002f1317c" containerName="manage-dockerfile" Jan 22 00:19:30 crc kubenswrapper[4829]: E0122 00:19:30.158245 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bb811d9-cffa-4113-a542-00ecedd4efbc" containerName="git-clone" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.158251 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bb811d9-cffa-4113-a542-00ecedd4efbc" containerName="git-clone" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.158352 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="08e5360b-ccc3-40a1-a58a-568002f1317c" containerName="docker-build" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.158370 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bb811d9-cffa-4113-a542-00ecedd4efbc" containerName="git-clone" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.158877 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-vwmph" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.161736 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.162563 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.167161 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-vwmph"] Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.170516 4829 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-qvjjr" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.232239 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/8bb811d9-cffa-4113-a542-00ecedd4efbc-build-blob-cache\") pod \"8bb811d9-cffa-4113-a542-00ecedd4efbc\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.232480 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/8bb811d9-cffa-4113-a542-00ecedd4efbc-builder-dockercfg-ll9bh-push\") pod \"8bb811d9-cffa-4113-a542-00ecedd4efbc\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.232621 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/8bb811d9-cffa-4113-a542-00ecedd4efbc-builder-dockercfg-ll9bh-pull\") pod \"8bb811d9-cffa-4113-a542-00ecedd4efbc\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.232707 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8bb811d9-cffa-4113-a542-00ecedd4efbc-build-proxy-ca-bundles\") pod \"8bb811d9-cffa-4113-a542-00ecedd4efbc\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.232777 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/8bb811d9-cffa-4113-a542-00ecedd4efbc-buildcachedir\") pod \"8bb811d9-cffa-4113-a542-00ecedd4efbc\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.232881 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/8bb811d9-cffa-4113-a542-00ecedd4efbc-container-storage-run\") pod \"8bb811d9-cffa-4113-a542-00ecedd4efbc\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.232968 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8bb811d9-cffa-4113-a542-00ecedd4efbc-build-ca-bundles\") pod \"8bb811d9-cffa-4113-a542-00ecedd4efbc\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.233053 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: 
\"kubernetes.io/empty-dir/8bb811d9-cffa-4113-a542-00ecedd4efbc-buildworkdir\") pod \"8bb811d9-cffa-4113-a542-00ecedd4efbc\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.233134 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/8bb811d9-cffa-4113-a542-00ecedd4efbc-node-pullsecrets\") pod \"8bb811d9-cffa-4113-a542-00ecedd4efbc\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.233197 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwvhx\" (UniqueName: \"kubernetes.io/projected/8bb811d9-cffa-4113-a542-00ecedd4efbc-kube-api-access-zwvhx\") pod \"8bb811d9-cffa-4113-a542-00ecedd4efbc\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.233294 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/8bb811d9-cffa-4113-a542-00ecedd4efbc-build-system-configs\") pod \"8bb811d9-cffa-4113-a542-00ecedd4efbc\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.233383 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/8bb811d9-cffa-4113-a542-00ecedd4efbc-container-storage-root\") pod \"8bb811d9-cffa-4113-a542-00ecedd4efbc\" (UID: \"8bb811d9-cffa-4113-a542-00ecedd4efbc\") " Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.234348 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bb811d9-cffa-4113-a542-00ecedd4efbc-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "8bb811d9-cffa-4113-a542-00ecedd4efbc" (UID: "8bb811d9-cffa-4113-a542-00ecedd4efbc"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.235070 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8bb811d9-cffa-4113-a542-00ecedd4efbc-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "8bb811d9-cffa-4113-a542-00ecedd4efbc" (UID: "8bb811d9-cffa-4113-a542-00ecedd4efbc"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.235225 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8bb811d9-cffa-4113-a542-00ecedd4efbc-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "8bb811d9-cffa-4113-a542-00ecedd4efbc" (UID: "8bb811d9-cffa-4113-a542-00ecedd4efbc"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.235492 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bb811d9-cffa-4113-a542-00ecedd4efbc-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "8bb811d9-cffa-4113-a542-00ecedd4efbc" (UID: "8bb811d9-cffa-4113-a542-00ecedd4efbc"). InnerVolumeSpecName "build-proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.235973 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bb811d9-cffa-4113-a542-00ecedd4efbc-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "8bb811d9-cffa-4113-a542-00ecedd4efbc" (UID: "8bb811d9-cffa-4113-a542-00ecedd4efbc"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.236222 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bb811d9-cffa-4113-a542-00ecedd4efbc-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "8bb811d9-cffa-4113-a542-00ecedd4efbc" (UID: "8bb811d9-cffa-4113-a542-00ecedd4efbc"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.236632 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bb811d9-cffa-4113-a542-00ecedd4efbc-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "8bb811d9-cffa-4113-a542-00ecedd4efbc" (UID: "8bb811d9-cffa-4113-a542-00ecedd4efbc"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.236891 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bb811d9-cffa-4113-a542-00ecedd4efbc-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "8bb811d9-cffa-4113-a542-00ecedd4efbc" (UID: "8bb811d9-cffa-4113-a542-00ecedd4efbc"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.237070 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bb811d9-cffa-4113-a542-00ecedd4efbc-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "8bb811d9-cffa-4113-a542-00ecedd4efbc" (UID: "8bb811d9-cffa-4113-a542-00ecedd4efbc"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.243697 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bb811d9-cffa-4113-a542-00ecedd4efbc-builder-dockercfg-ll9bh-push" (OuterVolumeSpecName: "builder-dockercfg-ll9bh-push") pod "8bb811d9-cffa-4113-a542-00ecedd4efbc" (UID: "8bb811d9-cffa-4113-a542-00ecedd4efbc"). InnerVolumeSpecName "builder-dockercfg-ll9bh-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.243739 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bb811d9-cffa-4113-a542-00ecedd4efbc-kube-api-access-zwvhx" (OuterVolumeSpecName: "kube-api-access-zwvhx") pod "8bb811d9-cffa-4113-a542-00ecedd4efbc" (UID: "8bb811d9-cffa-4113-a542-00ecedd4efbc"). InnerVolumeSpecName "kube-api-access-zwvhx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.259248 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bb811d9-cffa-4113-a542-00ecedd4efbc-builder-dockercfg-ll9bh-pull" (OuterVolumeSpecName: "builder-dockercfg-ll9bh-pull") pod "8bb811d9-cffa-4113-a542-00ecedd4efbc" (UID: "8bb811d9-cffa-4113-a542-00ecedd4efbc"). InnerVolumeSpecName "builder-dockercfg-ll9bh-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.334375 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a13fcd99-a750-4831-a905-548b24cccf48-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-vwmph\" (UID: \"a13fcd99-a750-4831-a905-548b24cccf48\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-vwmph" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.334899 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2zsb\" (UniqueName: \"kubernetes.io/projected/a13fcd99-a750-4831-a905-548b24cccf48-kube-api-access-v2zsb\") pod \"cert-manager-webhook-f4fb5df64-vwmph\" (UID: \"a13fcd99-a750-4831-a905-548b24cccf48\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-vwmph" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.335160 4829 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/8bb811d9-cffa-4113-a542-00ecedd4efbc-buildworkdir\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.335206 4829 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/8bb811d9-cffa-4113-a542-00ecedd4efbc-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.335220 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwvhx\" (UniqueName: \"kubernetes.io/projected/8bb811d9-cffa-4113-a542-00ecedd4efbc-kube-api-access-zwvhx\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.335234 4829 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/8bb811d9-cffa-4113-a542-00ecedd4efbc-build-system-configs\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.335247 4829 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/8bb811d9-cffa-4113-a542-00ecedd4efbc-container-storage-root\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.335257 4829 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/8bb811d9-cffa-4113-a542-00ecedd4efbc-build-blob-cache\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.335269 4829 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/8bb811d9-cffa-4113-a542-00ecedd4efbc-builder-dockercfg-ll9bh-push\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.335282 4829 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: 
\"kubernetes.io/secret/8bb811d9-cffa-4113-a542-00ecedd4efbc-builder-dockercfg-ll9bh-pull\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.335294 4829 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8bb811d9-cffa-4113-a542-00ecedd4efbc-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.335306 4829 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/8bb811d9-cffa-4113-a542-00ecedd4efbc-buildcachedir\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.335317 4829 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/8bb811d9-cffa-4113-a542-00ecedd4efbc-container-storage-run\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.335328 4829 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8bb811d9-cffa-4113-a542-00ecedd4efbc-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.436222 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a13fcd99-a750-4831-a905-548b24cccf48-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-vwmph\" (UID: \"a13fcd99-a750-4831-a905-548b24cccf48\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-vwmph" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.436332 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2zsb\" (UniqueName: \"kubernetes.io/projected/a13fcd99-a750-4831-a905-548b24cccf48-kube-api-access-v2zsb\") pod \"cert-manager-webhook-f4fb5df64-vwmph\" (UID: \"a13fcd99-a750-4831-a905-548b24cccf48\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-vwmph" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.455520 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a13fcd99-a750-4831-a905-548b24cccf48-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-vwmph\" (UID: \"a13fcd99-a750-4831-a905-548b24cccf48\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-vwmph" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.465313 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2zsb\" (UniqueName: \"kubernetes.io/projected/a13fcd99-a750-4831-a905-548b24cccf48-kube-api-access-v2zsb\") pod \"cert-manager-webhook-f4fb5df64-vwmph\" (UID: \"a13fcd99-a750-4831-a905-548b24cccf48\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-vwmph" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.475466 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-vwmph" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.501524 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-2-build_8bb811d9-cffa-4113-a542-00ecedd4efbc/git-clone/0.log" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.501793 4829 generic.go:334] "Generic (PLEG): container finished" podID="8bb811d9-cffa-4113-a542-00ecedd4efbc" containerID="d134fc9df2fec84a3c849fa5ec0edbe469bf6f6de3b4b347ca40df3a91087012" exitCode=1 Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.501882 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"8bb811d9-cffa-4113-a542-00ecedd4efbc","Type":"ContainerDied","Data":"d134fc9df2fec84a3c849fa5ec0edbe469bf6f6de3b4b347ca40df3a91087012"} Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.501998 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-2-build" event={"ID":"8bb811d9-cffa-4113-a542-00ecedd4efbc","Type":"ContainerDied","Data":"4b6fdb7e4a211c2e670ad32448347de60a9d5ae9aeb3e7ef41e034084e43b699"} Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.502068 4829 scope.go:117] "RemoveContainer" containerID="d134fc9df2fec84a3c849fa5ec0edbe469bf6f6de3b4b347ca40df3a91087012" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.502232 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-2-build" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.525749 4829 scope.go:117] "RemoveContainer" containerID="d134fc9df2fec84a3c849fa5ec0edbe469bf6f6de3b4b347ca40df3a91087012" Jan 22 00:19:30 crc kubenswrapper[4829]: E0122 00:19:30.526205 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d134fc9df2fec84a3c849fa5ec0edbe469bf6f6de3b4b347ca40df3a91087012\": container with ID starting with d134fc9df2fec84a3c849fa5ec0edbe469bf6f6de3b4b347ca40df3a91087012 not found: ID does not exist" containerID="d134fc9df2fec84a3c849fa5ec0edbe469bf6f6de3b4b347ca40df3a91087012" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.526254 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d134fc9df2fec84a3c849fa5ec0edbe469bf6f6de3b4b347ca40df3a91087012"} err="failed to get container status \"d134fc9df2fec84a3c849fa5ec0edbe469bf6f6de3b4b347ca40df3a91087012\": rpc error: code = NotFound desc = could not find container \"d134fc9df2fec84a3c849fa5ec0edbe469bf6f6de3b4b347ca40df3a91087012\": container with ID starting with d134fc9df2fec84a3c849fa5ec0edbe469bf6f6de3b4b347ca40df3a91087012 not found: ID does not exist" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.536506 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-operator-2-build"] Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.550073 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/service-telemetry-operator-2-build"] Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.560454 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8bb811d9-cffa-4113-a542-00ecedd4efbc" path="/var/lib/kubelet/pods/8bb811d9-cffa-4113-a542-00ecedd4efbc/volumes" Jan 22 00:19:30 crc kubenswrapper[4829]: I0122 00:19:30.709884 4829 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-vwmph"] Jan 22 00:19:31 crc kubenswrapper[4829]: I0122 00:19:31.513718 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-vwmph" event={"ID":"a13fcd99-a750-4831-a905-548b24cccf48","Type":"ContainerStarted","Data":"52fe8b64c821d1e894ce919f05392243a21830c4cdfd90b1b5e96c893aea3b79"} Jan 22 00:19:32 crc kubenswrapper[4829]: I0122 00:19:32.087802 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-6rdp8"] Jan 22 00:19:32 crc kubenswrapper[4829]: I0122 00:19:32.088734 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-6rdp8" Jan 22 00:19:32 crc kubenswrapper[4829]: I0122 00:19:32.095564 4829 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-rbv2w" Jan 22 00:19:32 crc kubenswrapper[4829]: I0122 00:19:32.104299 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-6rdp8"] Jan 22 00:19:32 crc kubenswrapper[4829]: I0122 00:19:32.263018 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c88bc26a-1dc4-459d-8e47-86eafb891058-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-6rdp8\" (UID: \"c88bc26a-1dc4-459d-8e47-86eafb891058\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-6rdp8" Jan 22 00:19:32 crc kubenswrapper[4829]: I0122 00:19:32.263173 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2lqp\" (UniqueName: \"kubernetes.io/projected/c88bc26a-1dc4-459d-8e47-86eafb891058-kube-api-access-s2lqp\") pod \"cert-manager-cainjector-855d9ccff4-6rdp8\" (UID: \"c88bc26a-1dc4-459d-8e47-86eafb891058\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-6rdp8" Jan 22 00:19:32 crc kubenswrapper[4829]: I0122 00:19:32.364570 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c88bc26a-1dc4-459d-8e47-86eafb891058-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-6rdp8\" (UID: \"c88bc26a-1dc4-459d-8e47-86eafb891058\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-6rdp8" Jan 22 00:19:32 crc kubenswrapper[4829]: I0122 00:19:32.364627 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2lqp\" (UniqueName: \"kubernetes.io/projected/c88bc26a-1dc4-459d-8e47-86eafb891058-kube-api-access-s2lqp\") pod \"cert-manager-cainjector-855d9ccff4-6rdp8\" (UID: \"c88bc26a-1dc4-459d-8e47-86eafb891058\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-6rdp8" Jan 22 00:19:32 crc kubenswrapper[4829]: I0122 00:19:32.387202 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c88bc26a-1dc4-459d-8e47-86eafb891058-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-6rdp8\" (UID: \"c88bc26a-1dc4-459d-8e47-86eafb891058\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-6rdp8" Jan 22 00:19:32 crc kubenswrapper[4829]: I0122 00:19:32.404512 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2lqp\" (UniqueName: \"kubernetes.io/projected/c88bc26a-1dc4-459d-8e47-86eafb891058-kube-api-access-s2lqp\") pod 
\"cert-manager-cainjector-855d9ccff4-6rdp8\" (UID: \"c88bc26a-1dc4-459d-8e47-86eafb891058\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-6rdp8" Jan 22 00:19:32 crc kubenswrapper[4829]: I0122 00:19:32.415737 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-6rdp8" Jan 22 00:19:32 crc kubenswrapper[4829]: I0122 00:19:32.890053 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-6rdp8"] Jan 22 00:19:33 crc kubenswrapper[4829]: I0122 00:19:33.525745 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-6rdp8" event={"ID":"c88bc26a-1dc4-459d-8e47-86eafb891058","Type":"ContainerStarted","Data":"b10f864eb766b8ca2161f03f22b4d6acbe158cdc2e8108b5e4d6a3efe4b2bcf6"} Jan 22 00:19:34 crc kubenswrapper[4829]: I0122 00:19:34.658508 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:19:34 crc kubenswrapper[4829]: I0122 00:19:34.658589 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:19:39 crc kubenswrapper[4829]: I0122 00:19:39.587294 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-6rdp8" event={"ID":"c88bc26a-1dc4-459d-8e47-86eafb891058","Type":"ContainerStarted","Data":"19fb4eace0f1a337ff06dcd53e8f2e57183edb525ed26cbc193d5b9dd4b34282"} Jan 22 00:19:39 crc kubenswrapper[4829]: I0122 00:19:39.590661 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-vwmph" event={"ID":"a13fcd99-a750-4831-a905-548b24cccf48","Type":"ContainerStarted","Data":"6f3717b4da9c896f666fc82e774c3d36a9b0f8c543c14da5fca6e5b06a074140"} Jan 22 00:19:39 crc kubenswrapper[4829]: I0122 00:19:39.590939 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-vwmph" Jan 22 00:19:39 crc kubenswrapper[4829]: I0122 00:19:39.603399 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-6rdp8" podStartSLOduration=1.740356803 podStartE2EDuration="7.60338069s" podCreationTimestamp="2026-01-22 00:19:32 +0000 UTC" firstStartedPulling="2026-01-22 00:19:32.91744153 +0000 UTC m=+750.953683442" lastFinishedPulling="2026-01-22 00:19:38.780465377 +0000 UTC m=+756.816707329" observedRunningTime="2026-01-22 00:19:39.601912582 +0000 UTC m=+757.638154544" watchObservedRunningTime="2026-01-22 00:19:39.60338069 +0000 UTC m=+757.639622612" Jan 22 00:19:39 crc kubenswrapper[4829]: I0122 00:19:39.623516 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-vwmph" podStartSLOduration=1.679817891 podStartE2EDuration="9.623497823s" podCreationTimestamp="2026-01-22 00:19:30 +0000 UTC" firstStartedPulling="2026-01-22 00:19:30.712972188 +0000 UTC m=+748.749214100" lastFinishedPulling="2026-01-22 00:19:38.65665212 +0000 UTC 
m=+756.692894032" observedRunningTime="2026-01-22 00:19:39.622785816 +0000 UTC m=+757.659027738" watchObservedRunningTime="2026-01-22 00:19:39.623497823 +0000 UTC m=+757.659739745" Jan 22 00:19:39 crc kubenswrapper[4829]: I0122 00:19:39.975370 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-3-build"] Jan 22 00:19:39 crc kubenswrapper[4829]: I0122 00:19:39.977111 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:39 crc kubenswrapper[4829]: I0122 00:19:39.979474 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-3-global-ca" Jan 22 00:19:39 crc kubenswrapper[4829]: I0122 00:19:39.981152 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-3-sys-config" Jan 22 00:19:39 crc kubenswrapper[4829]: I0122 00:19:39.981315 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-ll9bh" Jan 22 00:19:39 crc kubenswrapper[4829]: I0122 00:19:39.982187 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-3-ca" Jan 22 00:19:39 crc kubenswrapper[4829]: I0122 00:19:39.996777 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-3-build"] Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.089296 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/fe443276-5eac-4d64-ad57-1348ef3d4dff-builder-dockercfg-ll9bh-pull\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.089344 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/fe443276-5eac-4d64-ad57-1348ef3d4dff-buildcachedir\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.089366 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/fe443276-5eac-4d64-ad57-1348ef3d4dff-buildworkdir\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.089391 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fe443276-5eac-4d64-ad57-1348ef3d4dff-build-ca-bundles\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.089413 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/fe443276-5eac-4d64-ad57-1348ef3d4dff-node-pullsecrets\") pod \"service-telemetry-operator-3-build\" (UID: 
\"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.089433 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwljm\" (UniqueName: \"kubernetes.io/projected/fe443276-5eac-4d64-ad57-1348ef3d4dff-kube-api-access-gwljm\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.089459 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/fe443276-5eac-4d64-ad57-1348ef3d4dff-build-blob-cache\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.089481 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/fe443276-5eac-4d64-ad57-1348ef3d4dff-container-storage-root\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.089557 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/fe443276-5eac-4d64-ad57-1348ef3d4dff-container-storage-run\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.089587 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/fe443276-5eac-4d64-ad57-1348ef3d4dff-builder-dockercfg-ll9bh-push\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.089618 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fe443276-5eac-4d64-ad57-1348ef3d4dff-build-proxy-ca-bundles\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.089667 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/fe443276-5eac-4d64-ad57-1348ef3d4dff-build-system-configs\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.191418 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwljm\" (UniqueName: \"kubernetes.io/projected/fe443276-5eac-4d64-ad57-1348ef3d4dff-kube-api-access-gwljm\") pod \"service-telemetry-operator-3-build\" (UID: 
\"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.191484 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/fe443276-5eac-4d64-ad57-1348ef3d4dff-build-blob-cache\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.191512 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/fe443276-5eac-4d64-ad57-1348ef3d4dff-container-storage-root\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.191593 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/fe443276-5eac-4d64-ad57-1348ef3d4dff-container-storage-run\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.191627 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/fe443276-5eac-4d64-ad57-1348ef3d4dff-builder-dockercfg-ll9bh-push\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.191657 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fe443276-5eac-4d64-ad57-1348ef3d4dff-build-proxy-ca-bundles\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.191682 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/fe443276-5eac-4d64-ad57-1348ef3d4dff-build-system-configs\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.191728 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/fe443276-5eac-4d64-ad57-1348ef3d4dff-builder-dockercfg-ll9bh-pull\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.191750 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/fe443276-5eac-4d64-ad57-1348ef3d4dff-buildcachedir\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.191771 4829 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/fe443276-5eac-4d64-ad57-1348ef3d4dff-buildworkdir\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.191798 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fe443276-5eac-4d64-ad57-1348ef3d4dff-build-ca-bundles\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.191820 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/fe443276-5eac-4d64-ad57-1348ef3d4dff-node-pullsecrets\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.191922 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/fe443276-5eac-4d64-ad57-1348ef3d4dff-node-pullsecrets\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.192083 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/fe443276-5eac-4d64-ad57-1348ef3d4dff-container-storage-root\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.192078 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/fe443276-5eac-4d64-ad57-1348ef3d4dff-build-blob-cache\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.192389 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/fe443276-5eac-4d64-ad57-1348ef3d4dff-container-storage-run\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.192495 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/fe443276-5eac-4d64-ad57-1348ef3d4dff-buildcachedir\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.192526 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fe443276-5eac-4d64-ad57-1348ef3d4dff-build-proxy-ca-bundles\") pod \"service-telemetry-operator-3-build\" (UID: 
\"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.192533 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/fe443276-5eac-4d64-ad57-1348ef3d4dff-buildworkdir\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.192630 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/fe443276-5eac-4d64-ad57-1348ef3d4dff-build-system-configs\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.193243 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fe443276-5eac-4d64-ad57-1348ef3d4dff-build-ca-bundles\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.197361 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/fe443276-5eac-4d64-ad57-1348ef3d4dff-builder-dockercfg-ll9bh-pull\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.197414 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/fe443276-5eac-4d64-ad57-1348ef3d4dff-builder-dockercfg-ll9bh-push\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.209532 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwljm\" (UniqueName: \"kubernetes.io/projected/fe443276-5eac-4d64-ad57-1348ef3d4dff-kube-api-access-gwljm\") pod \"service-telemetry-operator-3-build\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.296900 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.540442 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-3-build"] Jan 22 00:19:40 crc kubenswrapper[4829]: I0122 00:19:40.597952 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-3-build" event={"ID":"fe443276-5eac-4d64-ad57-1348ef3d4dff","Type":"ContainerStarted","Data":"b11fa6dd72e03c92596b63513774c429cb88d2482e78bc66bf0346ebc8b10445"} Jan 22 00:19:41 crc kubenswrapper[4829]: I0122 00:19:41.603991 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-3-build" event={"ID":"fe443276-5eac-4d64-ad57-1348ef3d4dff","Type":"ContainerStarted","Data":"66bdf12db795c9a7899dfb82b690fe18b44d9aa620be85e6ed3037f1b0677468"} Jan 22 00:19:41 crc kubenswrapper[4829]: E0122 00:19:41.675964 4829 server.go:309] "Unable to authenticate the request due to an error" err="verifying certificate SN=6956309506909475776, SKID=, AKID=58:29:81:64:0A:06:19:68:BE:C7:23:6D:77:93:1F:D0:C2:75:D5:20 failed: x509: certificate signed by unknown authority" Jan 22 00:19:42 crc kubenswrapper[4829]: I0122 00:19:42.703387 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-operator-3-build"] Jan 22 00:19:43 crc kubenswrapper[4829]: I0122 00:19:43.411460 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-86cb77c54b-qx4r2"] Jan 22 00:19:43 crc kubenswrapper[4829]: I0122 00:19:43.412675 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-qx4r2" Jan 22 00:19:43 crc kubenswrapper[4829]: I0122 00:19:43.415880 4829 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-rgm5l" Jan 22 00:19:43 crc kubenswrapper[4829]: I0122 00:19:43.432797 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-qx4r2"] Jan 22 00:19:43 crc kubenswrapper[4829]: I0122 00:19:43.454463 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lngl\" (UniqueName: \"kubernetes.io/projected/12fe7f95-6902-42ad-82b7-e7162826af10-kube-api-access-6lngl\") pod \"cert-manager-86cb77c54b-qx4r2\" (UID: \"12fe7f95-6902-42ad-82b7-e7162826af10\") " pod="cert-manager/cert-manager-86cb77c54b-qx4r2" Jan 22 00:19:43 crc kubenswrapper[4829]: I0122 00:19:43.454678 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/12fe7f95-6902-42ad-82b7-e7162826af10-bound-sa-token\") pod \"cert-manager-86cb77c54b-qx4r2\" (UID: \"12fe7f95-6902-42ad-82b7-e7162826af10\") " pod="cert-manager/cert-manager-86cb77c54b-qx4r2" Jan 22 00:19:43 crc kubenswrapper[4829]: I0122 00:19:43.556027 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lngl\" (UniqueName: \"kubernetes.io/projected/12fe7f95-6902-42ad-82b7-e7162826af10-kube-api-access-6lngl\") pod \"cert-manager-86cb77c54b-qx4r2\" (UID: \"12fe7f95-6902-42ad-82b7-e7162826af10\") " pod="cert-manager/cert-manager-86cb77c54b-qx4r2" Jan 22 00:19:43 crc kubenswrapper[4829]: I0122 00:19:43.556136 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/12fe7f95-6902-42ad-82b7-e7162826af10-bound-sa-token\") pod \"cert-manager-86cb77c54b-qx4r2\" (UID: \"12fe7f95-6902-42ad-82b7-e7162826af10\") " pod="cert-manager/cert-manager-86cb77c54b-qx4r2" Jan 22 00:19:43 crc kubenswrapper[4829]: I0122 00:19:43.575741 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lngl\" (UniqueName: \"kubernetes.io/projected/12fe7f95-6902-42ad-82b7-e7162826af10-kube-api-access-6lngl\") pod \"cert-manager-86cb77c54b-qx4r2\" (UID: \"12fe7f95-6902-42ad-82b7-e7162826af10\") " pod="cert-manager/cert-manager-86cb77c54b-qx4r2" Jan 22 00:19:43 crc kubenswrapper[4829]: I0122 00:19:43.582881 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/12fe7f95-6902-42ad-82b7-e7162826af10-bound-sa-token\") pod \"cert-manager-86cb77c54b-qx4r2\" (UID: \"12fe7f95-6902-42ad-82b7-e7162826af10\") " pod="cert-manager/cert-manager-86cb77c54b-qx4r2" Jan 22 00:19:43 crc kubenswrapper[4829]: I0122 00:19:43.628761 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/service-telemetry-operator-3-build" podUID="fe443276-5eac-4d64-ad57-1348ef3d4dff" containerName="git-clone" containerID="cri-o://66bdf12db795c9a7899dfb82b690fe18b44d9aa620be85e6ed3037f1b0677468" gracePeriod=30 Jan 22 00:19:43 crc kubenswrapper[4829]: I0122 00:19:43.760758 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-qx4r2" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.029005 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-qx4r2"] Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.034880 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-3-build_fe443276-5eac-4d64-ad57-1348ef3d4dff/git-clone/0.log" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.034987 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.064587 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/fe443276-5eac-4d64-ad57-1348ef3d4dff-buildcachedir\") pod \"fe443276-5eac-4d64-ad57-1348ef3d4dff\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.064645 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/fe443276-5eac-4d64-ad57-1348ef3d4dff-builder-dockercfg-ll9bh-push\") pod \"fe443276-5eac-4d64-ad57-1348ef3d4dff\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.064679 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/fe443276-5eac-4d64-ad57-1348ef3d4dff-build-blob-cache\") pod \"fe443276-5eac-4d64-ad57-1348ef3d4dff\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.064705 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gwljm\" (UniqueName: \"kubernetes.io/projected/fe443276-5eac-4d64-ad57-1348ef3d4dff-kube-api-access-gwljm\") pod \"fe443276-5eac-4d64-ad57-1348ef3d4dff\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.064737 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/fe443276-5eac-4d64-ad57-1348ef3d4dff-builder-dockercfg-ll9bh-pull\") pod \"fe443276-5eac-4d64-ad57-1348ef3d4dff\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.064766 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/fe443276-5eac-4d64-ad57-1348ef3d4dff-build-system-configs\") pod \"fe443276-5eac-4d64-ad57-1348ef3d4dff\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.064812 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fe443276-5eac-4d64-ad57-1348ef3d4dff-build-ca-bundles\") pod \"fe443276-5eac-4d64-ad57-1348ef3d4dff\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.064830 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/fe443276-5eac-4d64-ad57-1348ef3d4dff-node-pullsecrets\") pod \"fe443276-5eac-4d64-ad57-1348ef3d4dff\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.064851 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/fe443276-5eac-4d64-ad57-1348ef3d4dff-container-storage-run\") pod \"fe443276-5eac-4d64-ad57-1348ef3d4dff\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.064901 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" 
(UniqueName: \"kubernetes.io/empty-dir/fe443276-5eac-4d64-ad57-1348ef3d4dff-container-storage-root\") pod \"fe443276-5eac-4d64-ad57-1348ef3d4dff\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.064940 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fe443276-5eac-4d64-ad57-1348ef3d4dff-build-proxy-ca-bundles\") pod \"fe443276-5eac-4d64-ad57-1348ef3d4dff\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.064974 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/fe443276-5eac-4d64-ad57-1348ef3d4dff-buildworkdir\") pod \"fe443276-5eac-4d64-ad57-1348ef3d4dff\" (UID: \"fe443276-5eac-4d64-ad57-1348ef3d4dff\") " Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.065443 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe443276-5eac-4d64-ad57-1348ef3d4dff-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "fe443276-5eac-4d64-ad57-1348ef3d4dff" (UID: "fe443276-5eac-4d64-ad57-1348ef3d4dff"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.065481 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fe443276-5eac-4d64-ad57-1348ef3d4dff-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "fe443276-5eac-4d64-ad57-1348ef3d4dff" (UID: "fe443276-5eac-4d64-ad57-1348ef3d4dff"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.065671 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fe443276-5eac-4d64-ad57-1348ef3d4dff-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "fe443276-5eac-4d64-ad57-1348ef3d4dff" (UID: "fe443276-5eac-4d64-ad57-1348ef3d4dff"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.065856 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe443276-5eac-4d64-ad57-1348ef3d4dff-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "fe443276-5eac-4d64-ad57-1348ef3d4dff" (UID: "fe443276-5eac-4d64-ad57-1348ef3d4dff"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.065876 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe443276-5eac-4d64-ad57-1348ef3d4dff-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "fe443276-5eac-4d64-ad57-1348ef3d4dff" (UID: "fe443276-5eac-4d64-ad57-1348ef3d4dff"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.066352 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe443276-5eac-4d64-ad57-1348ef3d4dff-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "fe443276-5eac-4d64-ad57-1348ef3d4dff" (UID: "fe443276-5eac-4d64-ad57-1348ef3d4dff"). 
InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.066429 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe443276-5eac-4d64-ad57-1348ef3d4dff-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "fe443276-5eac-4d64-ad57-1348ef3d4dff" (UID: "fe443276-5eac-4d64-ad57-1348ef3d4dff"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.066447 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe443276-5eac-4d64-ad57-1348ef3d4dff-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "fe443276-5eac-4d64-ad57-1348ef3d4dff" (UID: "fe443276-5eac-4d64-ad57-1348ef3d4dff"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.066519 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe443276-5eac-4d64-ad57-1348ef3d4dff-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "fe443276-5eac-4d64-ad57-1348ef3d4dff" (UID: "fe443276-5eac-4d64-ad57-1348ef3d4dff"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.072127 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe443276-5eac-4d64-ad57-1348ef3d4dff-builder-dockercfg-ll9bh-pull" (OuterVolumeSpecName: "builder-dockercfg-ll9bh-pull") pod "fe443276-5eac-4d64-ad57-1348ef3d4dff" (UID: "fe443276-5eac-4d64-ad57-1348ef3d4dff"). InnerVolumeSpecName "builder-dockercfg-ll9bh-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.072196 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe443276-5eac-4d64-ad57-1348ef3d4dff-kube-api-access-gwljm" (OuterVolumeSpecName: "kube-api-access-gwljm") pod "fe443276-5eac-4d64-ad57-1348ef3d4dff" (UID: "fe443276-5eac-4d64-ad57-1348ef3d4dff"). InnerVolumeSpecName "kube-api-access-gwljm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.073226 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe443276-5eac-4d64-ad57-1348ef3d4dff-builder-dockercfg-ll9bh-push" (OuterVolumeSpecName: "builder-dockercfg-ll9bh-push") pod "fe443276-5eac-4d64-ad57-1348ef3d4dff" (UID: "fe443276-5eac-4d64-ad57-1348ef3d4dff"). InnerVolumeSpecName "builder-dockercfg-ll9bh-push". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.165633 4829 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/fe443276-5eac-4d64-ad57-1348ef3d4dff-builder-dockercfg-ll9bh-push\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.165669 4829 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/fe443276-5eac-4d64-ad57-1348ef3d4dff-build-blob-cache\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.165678 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gwljm\" (UniqueName: \"kubernetes.io/projected/fe443276-5eac-4d64-ad57-1348ef3d4dff-kube-api-access-gwljm\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.165687 4829 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/fe443276-5eac-4d64-ad57-1348ef3d4dff-builder-dockercfg-ll9bh-pull\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.165696 4829 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/fe443276-5eac-4d64-ad57-1348ef3d4dff-build-system-configs\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.165705 4829 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fe443276-5eac-4d64-ad57-1348ef3d4dff-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.165713 4829 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/fe443276-5eac-4d64-ad57-1348ef3d4dff-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.165721 4829 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/fe443276-5eac-4d64-ad57-1348ef3d4dff-container-storage-run\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.165730 4829 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/fe443276-5eac-4d64-ad57-1348ef3d4dff-container-storage-root\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.165738 4829 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fe443276-5eac-4d64-ad57-1348ef3d4dff-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.165746 4829 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/fe443276-5eac-4d64-ad57-1348ef3d4dff-buildworkdir\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.165754 4829 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/fe443276-5eac-4d64-ad57-1348ef3d4dff-buildcachedir\") on node \"crc\" DevicePath \"\"" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.635622 4829 log.go:25] "Finished parsing log file" 
path="/var/log/pods/service-telemetry_service-telemetry-operator-3-build_fe443276-5eac-4d64-ad57-1348ef3d4dff/git-clone/0.log" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.635906 4829 generic.go:334] "Generic (PLEG): container finished" podID="fe443276-5eac-4d64-ad57-1348ef3d4dff" containerID="66bdf12db795c9a7899dfb82b690fe18b44d9aa620be85e6ed3037f1b0677468" exitCode=1 Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.635961 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-3-build" event={"ID":"fe443276-5eac-4d64-ad57-1348ef3d4dff","Type":"ContainerDied","Data":"66bdf12db795c9a7899dfb82b690fe18b44d9aa620be85e6ed3037f1b0677468"} Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.635990 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-3-build" event={"ID":"fe443276-5eac-4d64-ad57-1348ef3d4dff","Type":"ContainerDied","Data":"b11fa6dd72e03c92596b63513774c429cb88d2482e78bc66bf0346ebc8b10445"} Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.636010 4829 scope.go:117] "RemoveContainer" containerID="66bdf12db795c9a7899dfb82b690fe18b44d9aa620be85e6ed3037f1b0677468" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.636125 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-3-build" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.639831 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-qx4r2" event={"ID":"12fe7f95-6902-42ad-82b7-e7162826af10","Type":"ContainerStarted","Data":"5d69342b94d6ca2261e7f0d5b8d71d161ed7013cabf2335bee5276cbb55cf90a"} Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.640927 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-qx4r2" event={"ID":"12fe7f95-6902-42ad-82b7-e7162826af10","Type":"ContainerStarted","Data":"8d7d4f7b1425a9c42f7362db53c63d53b0c493c0b3adb86532dea28564b90583"} Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.657674 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-qx4r2" podStartSLOduration=1.6576498339999999 podStartE2EDuration="1.657649834s" podCreationTimestamp="2026-01-22 00:19:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:19:44.655285344 +0000 UTC m=+762.691527276" watchObservedRunningTime="2026-01-22 00:19:44.657649834 +0000 UTC m=+762.693891756" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.661075 4829 scope.go:117] "RemoveContainer" containerID="66bdf12db795c9a7899dfb82b690fe18b44d9aa620be85e6ed3037f1b0677468" Jan 22 00:19:44 crc kubenswrapper[4829]: E0122 00:19:44.662667 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66bdf12db795c9a7899dfb82b690fe18b44d9aa620be85e6ed3037f1b0677468\": container with ID starting with 66bdf12db795c9a7899dfb82b690fe18b44d9aa620be85e6ed3037f1b0677468 not found: ID does not exist" containerID="66bdf12db795c9a7899dfb82b690fe18b44d9aa620be85e6ed3037f1b0677468" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.662703 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66bdf12db795c9a7899dfb82b690fe18b44d9aa620be85e6ed3037f1b0677468"} err="failed to get container status 
\"66bdf12db795c9a7899dfb82b690fe18b44d9aa620be85e6ed3037f1b0677468\": rpc error: code = NotFound desc = could not find container \"66bdf12db795c9a7899dfb82b690fe18b44d9aa620be85e6ed3037f1b0677468\": container with ID starting with 66bdf12db795c9a7899dfb82b690fe18b44d9aa620be85e6ed3037f1b0677468 not found: ID does not exist" Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.692618 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/service-telemetry-operator-3-build"] Jan 22 00:19:44 crc kubenswrapper[4829]: I0122 00:19:44.697031 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/service-telemetry-operator-3-build"] Jan 22 00:19:45 crc kubenswrapper[4829]: I0122 00:19:45.480089 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-f4fb5df64-vwmph" Jan 22 00:19:46 crc kubenswrapper[4829]: I0122 00:19:46.568521 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe443276-5eac-4d64-ad57-1348ef3d4dff" path="/var/lib/kubelet/pods/fe443276-5eac-4d64-ad57-1348ef3d4dff/volumes" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.263488 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-4-build"] Jan 22 00:19:54 crc kubenswrapper[4829]: E0122 00:19:54.264398 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe443276-5eac-4d64-ad57-1348ef3d4dff" containerName="git-clone" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.264429 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe443276-5eac-4d64-ad57-1348ef3d4dff" containerName="git-clone" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.264779 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe443276-5eac-4d64-ad57-1348ef3d4dff" containerName="git-clone" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.266835 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.271807 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-4-ca" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.272491 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-4-sys-config" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.272641 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-4-global-ca" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.272654 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-ll9bh" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.303176 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-4-build"] Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.369145 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8fceabc2-5259-476d-9079-8e2b72ab18ed-build-ca-bundles\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.369201 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/8fceabc2-5259-476d-9079-8e2b72ab18ed-builder-dockercfg-ll9bh-pull\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.369272 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/8fceabc2-5259-476d-9079-8e2b72ab18ed-container-storage-root\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.369513 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/8fceabc2-5259-476d-9079-8e2b72ab18ed-buildcachedir\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.369601 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/8fceabc2-5259-476d-9079-8e2b72ab18ed-builder-dockercfg-ll9bh-push\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.369766 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqslc\" (UniqueName: \"kubernetes.io/projected/8fceabc2-5259-476d-9079-8e2b72ab18ed-kube-api-access-qqslc\") pod \"service-telemetry-operator-4-build\" 
(UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.369887 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/8fceabc2-5259-476d-9079-8e2b72ab18ed-container-storage-run\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.369917 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8fceabc2-5259-476d-9079-8e2b72ab18ed-build-proxy-ca-bundles\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.369952 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/8fceabc2-5259-476d-9079-8e2b72ab18ed-node-pullsecrets\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.369975 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/8fceabc2-5259-476d-9079-8e2b72ab18ed-build-blob-cache\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.370022 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/8fceabc2-5259-476d-9079-8e2b72ab18ed-build-system-configs\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.370053 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/8fceabc2-5259-476d-9079-8e2b72ab18ed-buildworkdir\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.471349 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/8fceabc2-5259-476d-9079-8e2b72ab18ed-container-storage-run\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.471460 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8fceabc2-5259-476d-9079-8e2b72ab18ed-build-proxy-ca-bundles\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " 
pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.471589 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/8fceabc2-5259-476d-9079-8e2b72ab18ed-node-pullsecrets\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.471662 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/8fceabc2-5259-476d-9079-8e2b72ab18ed-build-blob-cache\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.471718 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/8fceabc2-5259-476d-9079-8e2b72ab18ed-build-system-configs\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.471759 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/8fceabc2-5259-476d-9079-8e2b72ab18ed-buildworkdir\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.471784 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/8fceabc2-5259-476d-9079-8e2b72ab18ed-node-pullsecrets\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.471829 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8fceabc2-5259-476d-9079-8e2b72ab18ed-build-ca-bundles\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.471895 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/8fceabc2-5259-476d-9079-8e2b72ab18ed-container-storage-root\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.471938 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/8fceabc2-5259-476d-9079-8e2b72ab18ed-builder-dockercfg-ll9bh-pull\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.471975 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" 
(UniqueName: \"kubernetes.io/host-path/8fceabc2-5259-476d-9079-8e2b72ab18ed-buildcachedir\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.472154 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/8fceabc2-5259-476d-9079-8e2b72ab18ed-container-storage-run\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.472246 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/8fceabc2-5259-476d-9079-8e2b72ab18ed-buildworkdir\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.472271 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/8fceabc2-5259-476d-9079-8e2b72ab18ed-buildcachedir\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.472768 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/8fceabc2-5259-476d-9079-8e2b72ab18ed-build-system-configs\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.472939 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8fceabc2-5259-476d-9079-8e2b72ab18ed-build-proxy-ca-bundles\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.473124 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8fceabc2-5259-476d-9079-8e2b72ab18ed-build-ca-bundles\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.473392 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/8fceabc2-5259-476d-9079-8e2b72ab18ed-build-blob-cache\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.473492 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/8fceabc2-5259-476d-9079-8e2b72ab18ed-builder-dockercfg-ll9bh-push\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc 
kubenswrapper[4829]: I0122 00:19:54.473614 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqslc\" (UniqueName: \"kubernetes.io/projected/8fceabc2-5259-476d-9079-8e2b72ab18ed-kube-api-access-qqslc\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.473618 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/8fceabc2-5259-476d-9079-8e2b72ab18ed-container-storage-root\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.480452 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/8fceabc2-5259-476d-9079-8e2b72ab18ed-builder-dockercfg-ll9bh-pull\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.481252 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/8fceabc2-5259-476d-9079-8e2b72ab18ed-builder-dockercfg-ll9bh-push\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.494684 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqslc\" (UniqueName: \"kubernetes.io/projected/8fceabc2-5259-476d-9079-8e2b72ab18ed-kube-api-access-qqslc\") pod \"service-telemetry-operator-4-build\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:54 crc kubenswrapper[4829]: I0122 00:19:54.603244 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:19:55 crc kubenswrapper[4829]: I0122 00:19:55.075454 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-4-build"] Jan 22 00:19:55 crc kubenswrapper[4829]: I0122 00:19:55.649627 4829 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 22 00:19:55 crc kubenswrapper[4829]: I0122 00:19:55.742320 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-4-build" event={"ID":"8fceabc2-5259-476d-9079-8e2b72ab18ed","Type":"ContainerStarted","Data":"956c0ccd5b0a2596392bccbb1d9e8bd006fdbca575c9381100c93c015f8f9b34"} Jan 22 00:19:55 crc kubenswrapper[4829]: I0122 00:19:55.742372 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-4-build" event={"ID":"8fceabc2-5259-476d-9079-8e2b72ab18ed","Type":"ContainerStarted","Data":"2ec9d1a868ad51d2a8e7c5f03038ef1548bda7be60f045a03ccfaf951f1e4813"} Jan 22 00:20:04 crc kubenswrapper[4829]: I0122 00:20:04.664239 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:20:04 crc kubenswrapper[4829]: I0122 00:20:04.665170 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:20:04 crc kubenswrapper[4829]: I0122 00:20:04.665254 4829 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 00:20:04 crc kubenswrapper[4829]: I0122 00:20:04.666653 4829 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1a6767ee33d3f8536c661de53fec59c45e35b5827099605c67df4196857f9939"} pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 00:20:04 crc kubenswrapper[4829]: I0122 00:20:04.666764 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" containerID="cri-o://1a6767ee33d3f8536c661de53fec59c45e35b5827099605c67df4196857f9939" gracePeriod=600 Jan 22 00:20:04 crc kubenswrapper[4829]: I0122 00:20:04.803771 4829 generic.go:334] "Generic (PLEG): container finished" podID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerID="1a6767ee33d3f8536c661de53fec59c45e35b5827099605c67df4196857f9939" exitCode=0 Jan 22 00:20:04 crc kubenswrapper[4829]: I0122 00:20:04.803851 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerDied","Data":"1a6767ee33d3f8536c661de53fec59c45e35b5827099605c67df4196857f9939"} Jan 22 00:20:04 crc kubenswrapper[4829]: I0122 00:20:04.803891 4829 
scope.go:117] "RemoveContainer" containerID="cb30e41ea57ec072bdbdc7d1adc9b52feca9581b84ae5cf109ff0d3fe8f78fec" Jan 22 00:20:04 crc kubenswrapper[4829]: I0122 00:20:04.806353 4829 generic.go:334] "Generic (PLEG): container finished" podID="8fceabc2-5259-476d-9079-8e2b72ab18ed" containerID="956c0ccd5b0a2596392bccbb1d9e8bd006fdbca575c9381100c93c015f8f9b34" exitCode=0 Jan 22 00:20:04 crc kubenswrapper[4829]: I0122 00:20:04.806392 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-4-build" event={"ID":"8fceabc2-5259-476d-9079-8e2b72ab18ed","Type":"ContainerDied","Data":"956c0ccd5b0a2596392bccbb1d9e8bd006fdbca575c9381100c93c015f8f9b34"} Jan 22 00:20:05 crc kubenswrapper[4829]: I0122 00:20:05.817279 4829 generic.go:334] "Generic (PLEG): container finished" podID="8fceabc2-5259-476d-9079-8e2b72ab18ed" containerID="aa83b9d716e1ec2c686deb18185f72c372f8ba25b7080e32f0cd9b83f0fcb739" exitCode=0 Jan 22 00:20:05 crc kubenswrapper[4829]: I0122 00:20:05.817363 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-4-build" event={"ID":"8fceabc2-5259-476d-9079-8e2b72ab18ed","Type":"ContainerDied","Data":"aa83b9d716e1ec2c686deb18185f72c372f8ba25b7080e32f0cd9b83f0fcb739"} Jan 22 00:20:05 crc kubenswrapper[4829]: I0122 00:20:05.822779 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerStarted","Data":"3fcfdecd12f2ea6bd608be28228c0387268ca752f7a44968b5bed6691aacbbf6"} Jan 22 00:20:05 crc kubenswrapper[4829]: I0122 00:20:05.875366 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-4-build_8fceabc2-5259-476d-9079-8e2b72ab18ed/manage-dockerfile/0.log" Jan 22 00:20:06 crc kubenswrapper[4829]: I0122 00:20:06.835066 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-4-build" event={"ID":"8fceabc2-5259-476d-9079-8e2b72ab18ed","Type":"ContainerStarted","Data":"beee69ee0705d1c96351279575e028ab5232f06a30b563a8842ed8c572babe1f"} Jan 22 00:20:06 crc kubenswrapper[4829]: I0122 00:20:06.875860 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-operator-4-build" podStartSLOduration=12.875838892 podStartE2EDuration="12.875838892s" podCreationTimestamp="2026-01-22 00:19:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:20:06.874708546 +0000 UTC m=+784.910950478" watchObservedRunningTime="2026-01-22 00:20:06.875838892 +0000 UTC m=+784.912080824" Jan 22 00:21:49 crc kubenswrapper[4829]: I0122 00:21:49.537513 4829 generic.go:334] "Generic (PLEG): container finished" podID="8fceabc2-5259-476d-9079-8e2b72ab18ed" containerID="beee69ee0705d1c96351279575e028ab5232f06a30b563a8842ed8c572babe1f" exitCode=0 Jan 22 00:21:49 crc kubenswrapper[4829]: I0122 00:21:49.537673 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-4-build" event={"ID":"8fceabc2-5259-476d-9079-8e2b72ab18ed","Type":"ContainerDied","Data":"beee69ee0705d1c96351279575e028ab5232f06a30b563a8842ed8c572babe1f"} Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.800339 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.897348 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8fceabc2-5259-476d-9079-8e2b72ab18ed-build-proxy-ca-bundles\") pod \"8fceabc2-5259-476d-9079-8e2b72ab18ed\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.897387 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/8fceabc2-5259-476d-9079-8e2b72ab18ed-buildworkdir\") pod \"8fceabc2-5259-476d-9079-8e2b72ab18ed\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.897416 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/8fceabc2-5259-476d-9079-8e2b72ab18ed-builder-dockercfg-ll9bh-push\") pod \"8fceabc2-5259-476d-9079-8e2b72ab18ed\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.897444 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qqslc\" (UniqueName: \"kubernetes.io/projected/8fceabc2-5259-476d-9079-8e2b72ab18ed-kube-api-access-qqslc\") pod \"8fceabc2-5259-476d-9079-8e2b72ab18ed\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.897488 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/8fceabc2-5259-476d-9079-8e2b72ab18ed-container-storage-run\") pod \"8fceabc2-5259-476d-9079-8e2b72ab18ed\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.897508 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/8fceabc2-5259-476d-9079-8e2b72ab18ed-builder-dockercfg-ll9bh-pull\") pod \"8fceabc2-5259-476d-9079-8e2b72ab18ed\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.897526 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/8fceabc2-5259-476d-9079-8e2b72ab18ed-build-system-configs\") pod \"8fceabc2-5259-476d-9079-8e2b72ab18ed\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.897602 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/8fceabc2-5259-476d-9079-8e2b72ab18ed-build-blob-cache\") pod \"8fceabc2-5259-476d-9079-8e2b72ab18ed\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.897641 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/8fceabc2-5259-476d-9079-8e2b72ab18ed-node-pullsecrets\") pod \"8fceabc2-5259-476d-9079-8e2b72ab18ed\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.897684 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" 
(UniqueName: \"kubernetes.io/host-path/8fceabc2-5259-476d-9079-8e2b72ab18ed-buildcachedir\") pod \"8fceabc2-5259-476d-9079-8e2b72ab18ed\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.897721 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/8fceabc2-5259-476d-9079-8e2b72ab18ed-container-storage-root\") pod \"8fceabc2-5259-476d-9079-8e2b72ab18ed\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.897759 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8fceabc2-5259-476d-9079-8e2b72ab18ed-build-ca-bundles\") pod \"8fceabc2-5259-476d-9079-8e2b72ab18ed\" (UID: \"8fceabc2-5259-476d-9079-8e2b72ab18ed\") " Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.897818 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8fceabc2-5259-476d-9079-8e2b72ab18ed-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "8fceabc2-5259-476d-9079-8e2b72ab18ed" (UID: "8fceabc2-5259-476d-9079-8e2b72ab18ed"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.897917 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8fceabc2-5259-476d-9079-8e2b72ab18ed-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "8fceabc2-5259-476d-9079-8e2b72ab18ed" (UID: "8fceabc2-5259-476d-9079-8e2b72ab18ed"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.898222 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8fceabc2-5259-476d-9079-8e2b72ab18ed-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "8fceabc2-5259-476d-9079-8e2b72ab18ed" (UID: "8fceabc2-5259-476d-9079-8e2b72ab18ed"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.898332 4829 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/8fceabc2-5259-476d-9079-8e2b72ab18ed-build-system-configs\") on node \"crc\" DevicePath \"\"" Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.898363 4829 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/8fceabc2-5259-476d-9079-8e2b72ab18ed-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.898376 4829 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/8fceabc2-5259-476d-9079-8e2b72ab18ed-buildcachedir\") on node \"crc\" DevicePath \"\"" Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.898770 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8fceabc2-5259-476d-9079-8e2b72ab18ed-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "8fceabc2-5259-476d-9079-8e2b72ab18ed" (UID: "8fceabc2-5259-476d-9079-8e2b72ab18ed"). InnerVolumeSpecName "build-proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.898881 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8fceabc2-5259-476d-9079-8e2b72ab18ed-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "8fceabc2-5259-476d-9079-8e2b72ab18ed" (UID: "8fceabc2-5259-476d-9079-8e2b72ab18ed"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.898885 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8fceabc2-5259-476d-9079-8e2b72ab18ed-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "8fceabc2-5259-476d-9079-8e2b72ab18ed" (UID: "8fceabc2-5259-476d-9079-8e2b72ab18ed"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.903739 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fceabc2-5259-476d-9079-8e2b72ab18ed-builder-dockercfg-ll9bh-push" (OuterVolumeSpecName: "builder-dockercfg-ll9bh-push") pod "8fceabc2-5259-476d-9079-8e2b72ab18ed" (UID: "8fceabc2-5259-476d-9079-8e2b72ab18ed"). InnerVolumeSpecName "builder-dockercfg-ll9bh-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.903764 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fceabc2-5259-476d-9079-8e2b72ab18ed-kube-api-access-qqslc" (OuterVolumeSpecName: "kube-api-access-qqslc") pod "8fceabc2-5259-476d-9079-8e2b72ab18ed" (UID: "8fceabc2-5259-476d-9079-8e2b72ab18ed"). InnerVolumeSpecName "kube-api-access-qqslc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.903813 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fceabc2-5259-476d-9079-8e2b72ab18ed-builder-dockercfg-ll9bh-pull" (OuterVolumeSpecName: "builder-dockercfg-ll9bh-pull") pod "8fceabc2-5259-476d-9079-8e2b72ab18ed" (UID: "8fceabc2-5259-476d-9079-8e2b72ab18ed"). InnerVolumeSpecName "builder-dockercfg-ll9bh-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.937507 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8fceabc2-5259-476d-9079-8e2b72ab18ed-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "8fceabc2-5259-476d-9079-8e2b72ab18ed" (UID: "8fceabc2-5259-476d-9079-8e2b72ab18ed"). InnerVolumeSpecName "buildworkdir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.999191 4829 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/8fceabc2-5259-476d-9079-8e2b72ab18ed-builder-dockercfg-ll9bh-pull\") on node \"crc\" DevicePath \"\"" Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.999229 4829 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8fceabc2-5259-476d-9079-8e2b72ab18ed-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.999238 4829 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8fceabc2-5259-476d-9079-8e2b72ab18ed-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.999248 4829 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/8fceabc2-5259-476d-9079-8e2b72ab18ed-buildworkdir\") on node \"crc\" DevicePath \"\"" Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.999258 4829 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/8fceabc2-5259-476d-9079-8e2b72ab18ed-builder-dockercfg-ll9bh-push\") on node \"crc\" DevicePath \"\"" Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.999266 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qqslc\" (UniqueName: \"kubernetes.io/projected/8fceabc2-5259-476d-9079-8e2b72ab18ed-kube-api-access-qqslc\") on node \"crc\" DevicePath \"\"" Jan 22 00:21:50 crc kubenswrapper[4829]: I0122 00:21:50.999277 4829 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/8fceabc2-5259-476d-9079-8e2b72ab18ed-container-storage-run\") on node \"crc\" DevicePath \"\"" Jan 22 00:21:51 crc kubenswrapper[4829]: I0122 00:21:51.081573 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8fceabc2-5259-476d-9079-8e2b72ab18ed-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "8fceabc2-5259-476d-9079-8e2b72ab18ed" (UID: "8fceabc2-5259-476d-9079-8e2b72ab18ed"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:21:51 crc kubenswrapper[4829]: I0122 00:21:51.099990 4829 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/8fceabc2-5259-476d-9079-8e2b72ab18ed-build-blob-cache\") on node \"crc\" DevicePath \"\"" Jan 22 00:21:51 crc kubenswrapper[4829]: I0122 00:21:51.555708 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-4-build" event={"ID":"8fceabc2-5259-476d-9079-8e2b72ab18ed","Type":"ContainerDied","Data":"2ec9d1a868ad51d2a8e7c5f03038ef1548bda7be60f045a03ccfaf951f1e4813"} Jan 22 00:21:51 crc kubenswrapper[4829]: I0122 00:21:51.555750 4829 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2ec9d1a868ad51d2a8e7c5f03038ef1548bda7be60f045a03ccfaf951f1e4813" Jan 22 00:21:51 crc kubenswrapper[4829]: I0122 00:21:51.555885 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-4-build" Jan 22 00:21:53 crc kubenswrapper[4829]: I0122 00:21:53.060674 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8fceabc2-5259-476d-9079-8e2b72ab18ed-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "8fceabc2-5259-476d-9079-8e2b72ab18ed" (UID: "8fceabc2-5259-476d-9079-8e2b72ab18ed"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:21:53 crc kubenswrapper[4829]: I0122 00:21:53.128008 4829 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/8fceabc2-5259-476d-9079-8e2b72ab18ed-container-storage-root\") on node \"crc\" DevicePath \"\"" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.261592 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/smart-gateway-operator-1-build"] Jan 22 00:21:55 crc kubenswrapper[4829]: E0122 00:21:55.261878 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fceabc2-5259-476d-9079-8e2b72ab18ed" containerName="git-clone" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.261894 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fceabc2-5259-476d-9079-8e2b72ab18ed" containerName="git-clone" Jan 22 00:21:55 crc kubenswrapper[4829]: E0122 00:21:55.261914 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fceabc2-5259-476d-9079-8e2b72ab18ed" containerName="manage-dockerfile" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.261924 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fceabc2-5259-476d-9079-8e2b72ab18ed" containerName="manage-dockerfile" Jan 22 00:21:55 crc kubenswrapper[4829]: E0122 00:21:55.261934 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fceabc2-5259-476d-9079-8e2b72ab18ed" containerName="docker-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.261942 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fceabc2-5259-476d-9079-8e2b72ab18ed" containerName="docker-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.262092 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fceabc2-5259-476d-9079-8e2b72ab18ed" containerName="docker-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.262863 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.268798 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-1-ca" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.269184 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-1-global-ca" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.269353 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-ll9bh" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.269583 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-1-sys-config" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.276208 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-1-build"] Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.354759 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-container-storage-run\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.355288 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-build-proxy-ca-bundles\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.355326 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-buildcachedir\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.355371 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4f6cb\" (UniqueName: \"kubernetes.io/projected/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-kube-api-access-4f6cb\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.355399 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-buildworkdir\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.355632 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-builder-dockercfg-ll9bh-push\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " 
pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.355754 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-container-storage-root\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.355859 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-build-blob-cache\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.355951 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-node-pullsecrets\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.355989 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-builder-dockercfg-ll9bh-pull\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.356098 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-build-system-configs\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.356128 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-build-ca-bundles\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.457822 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-builder-dockercfg-ll9bh-push\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.457922 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-container-storage-root\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.458013 4829 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-build-blob-cache\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.458090 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-node-pullsecrets\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.458134 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-builder-dockercfg-ll9bh-pull\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.458215 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-node-pullsecrets\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.458229 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-build-system-configs\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.458270 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-build-ca-bundles\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.458382 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-container-storage-run\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.458440 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-build-proxy-ca-bundles\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.458490 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-buildcachedir\") pod \"smart-gateway-operator-1-build\" (UID: 
\"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.458623 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4f6cb\" (UniqueName: \"kubernetes.io/projected/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-kube-api-access-4f6cb\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.458666 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-buildworkdir\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.459035 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-build-blob-cache\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.459236 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-build-system-configs\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.459297 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-buildcachedir\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.459434 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-buildworkdir\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.459779 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-container-storage-run\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.459972 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-build-proxy-ca-bundles\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.460525 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: 
\"kubernetes.io/empty-dir/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-container-storage-root\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.460870 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-build-ca-bundles\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.465840 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-builder-dockercfg-ll9bh-push\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.471871 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-builder-dockercfg-ll9bh-pull\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.482737 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4f6cb\" (UniqueName: \"kubernetes.io/projected/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-kube-api-access-4f6cb\") pod \"smart-gateway-operator-1-build\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:55 crc kubenswrapper[4829]: I0122 00:21:55.586989 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:21:56 crc kubenswrapper[4829]: I0122 00:21:56.003973 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-1-build"] Jan 22 00:21:56 crc kubenswrapper[4829]: I0122 00:21:56.586458 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-1-build" event={"ID":"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b","Type":"ContainerStarted","Data":"fbf0f9e930c163c507fc1eec7b10855e034bd7cc67552ff1736b539625408f36"} Jan 22 00:21:56 crc kubenswrapper[4829]: I0122 00:21:56.586586 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-1-build" event={"ID":"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b","Type":"ContainerStarted","Data":"e2aa31daa78942fa724e8d4488ee6d42cca5a2bcf615c919dcea6728c3c68389"} Jan 22 00:21:57 crc kubenswrapper[4829]: I0122 00:21:57.596990 4829 generic.go:334] "Generic (PLEG): container finished" podID="67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b" containerID="fbf0f9e930c163c507fc1eec7b10855e034bd7cc67552ff1736b539625408f36" exitCode=0 Jan 22 00:21:57 crc kubenswrapper[4829]: I0122 00:21:57.597077 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-1-build" event={"ID":"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b","Type":"ContainerDied","Data":"fbf0f9e930c163c507fc1eec7b10855e034bd7cc67552ff1736b539625408f36"} Jan 22 00:21:58 crc kubenswrapper[4829]: I0122 00:21:58.609912 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-1-build" event={"ID":"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b","Type":"ContainerStarted","Data":"16679620436c6e67549648c8164ef715c93413c8050860f1b83645f6b1711efe"} Jan 22 00:21:58 crc kubenswrapper[4829]: I0122 00:21:58.644267 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/smart-gateway-operator-1-build" podStartSLOduration=3.64424642 podStartE2EDuration="3.64424642s" podCreationTimestamp="2026-01-22 00:21:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:21:58.640192023 +0000 UTC m=+896.676433945" watchObservedRunningTime="2026-01-22 00:21:58.64424642 +0000 UTC m=+896.680488332" Jan 22 00:22:04 crc kubenswrapper[4829]: I0122 00:22:04.658147 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:22:04 crc kubenswrapper[4829]: I0122 00:22:04.659043 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:22:06 crc kubenswrapper[4829]: I0122 00:22:06.141669 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/smart-gateway-operator-1-build"] Jan 22 00:22:06 crc kubenswrapper[4829]: I0122 00:22:06.142049 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/smart-gateway-operator-1-build" podUID="67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b" 
containerName="docker-build" containerID="cri-o://16679620436c6e67549648c8164ef715c93413c8050860f1b83645f6b1711efe" gracePeriod=30 Jan 22 00:22:07 crc kubenswrapper[4829]: I0122 00:22:07.822290 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/smart-gateway-operator-2-build"] Jan 22 00:22:07 crc kubenswrapper[4829]: I0122 00:22:07.824366 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:07 crc kubenswrapper[4829]: I0122 00:22:07.826305 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-2-global-ca" Jan 22 00:22:07 crc kubenswrapper[4829]: I0122 00:22:07.826324 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-2-sys-config" Jan 22 00:22:07 crc kubenswrapper[4829]: I0122 00:22:07.828679 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-2-ca" Jan 22 00:22:07 crc kubenswrapper[4829]: I0122 00:22:07.840379 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-2-build"] Jan 22 00:22:07 crc kubenswrapper[4829]: I0122 00:22:07.950788 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-builder-dockercfg-ll9bh-pull\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:07 crc kubenswrapper[4829]: I0122 00:22:07.950918 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-buildworkdir\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:07 crc kubenswrapper[4829]: I0122 00:22:07.950998 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-build-ca-bundles\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:07 crc kubenswrapper[4829]: I0122 00:22:07.951072 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-build-system-configs\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:07 crc kubenswrapper[4829]: I0122 00:22:07.951109 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-container-storage-root\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:07 crc kubenswrapper[4829]: I0122 00:22:07.951134 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-build-blob-cache\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:07 crc kubenswrapper[4829]: I0122 00:22:07.951162 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-node-pullsecrets\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:07 crc kubenswrapper[4829]: I0122 00:22:07.951189 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hl6w\" (UniqueName: \"kubernetes.io/projected/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-kube-api-access-2hl6w\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:07 crc kubenswrapper[4829]: I0122 00:22:07.951266 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-container-storage-run\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:07 crc kubenswrapper[4829]: I0122 00:22:07.951301 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-builder-dockercfg-ll9bh-push\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:07 crc kubenswrapper[4829]: I0122 00:22:07.951328 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-buildcachedir\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:07 crc kubenswrapper[4829]: I0122 00:22:07.951354 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-build-proxy-ca-bundles\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:08 crc kubenswrapper[4829]: I0122 00:22:08.052476 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-container-storage-run\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:08 crc kubenswrapper[4829]: I0122 00:22:08.052527 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-builder-dockercfg-ll9bh-push\") pod 
\"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:08 crc kubenswrapper[4829]: I0122 00:22:08.052576 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-buildcachedir\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:08 crc kubenswrapper[4829]: I0122 00:22:08.052608 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-build-proxy-ca-bundles\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:08 crc kubenswrapper[4829]: I0122 00:22:08.052671 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-builder-dockercfg-ll9bh-pull\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:08 crc kubenswrapper[4829]: I0122 00:22:08.052698 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-buildworkdir\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:08 crc kubenswrapper[4829]: I0122 00:22:08.052735 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-build-ca-bundles\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:08 crc kubenswrapper[4829]: I0122 00:22:08.052812 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-build-system-configs\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:08 crc kubenswrapper[4829]: I0122 00:22:08.052839 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-container-storage-root\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:08 crc kubenswrapper[4829]: I0122 00:22:08.052860 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-build-blob-cache\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:08 crc kubenswrapper[4829]: I0122 00:22:08.052883 4829 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-node-pullsecrets\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:08 crc kubenswrapper[4829]: I0122 00:22:08.052904 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hl6w\" (UniqueName: \"kubernetes.io/projected/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-kube-api-access-2hl6w\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:08 crc kubenswrapper[4829]: I0122 00:22:08.052694 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-buildcachedir\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:08 crc kubenswrapper[4829]: I0122 00:22:08.053267 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-node-pullsecrets\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:08 crc kubenswrapper[4829]: I0122 00:22:08.053669 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-build-proxy-ca-bundles\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:08 crc kubenswrapper[4829]: I0122 00:22:08.053664 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-build-system-configs\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:08 crc kubenswrapper[4829]: I0122 00:22:08.053809 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-container-storage-run\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:08 crc kubenswrapper[4829]: I0122 00:22:08.053959 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-buildworkdir\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:08 crc kubenswrapper[4829]: I0122 00:22:08.054028 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-container-storage-root\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:08 crc kubenswrapper[4829]: I0122 
00:22:08.054067 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-build-ca-bundles\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:08 crc kubenswrapper[4829]: I0122 00:22:08.054148 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-build-blob-cache\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:08 crc kubenswrapper[4829]: I0122 00:22:08.057706 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-builder-dockercfg-ll9bh-pull\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:08 crc kubenswrapper[4829]: I0122 00:22:08.057711 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-builder-dockercfg-ll9bh-push\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:08 crc kubenswrapper[4829]: I0122 00:22:08.072488 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hl6w\" (UniqueName: \"kubernetes.io/projected/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-kube-api-access-2hl6w\") pod \"smart-gateway-operator-2-build\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:08 crc kubenswrapper[4829]: I0122 00:22:08.140785 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:22:08 crc kubenswrapper[4829]: I0122 00:22:08.430063 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-2-build"] Jan 22 00:22:08 crc kubenswrapper[4829]: I0122 00:22:08.683029 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" event={"ID":"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d","Type":"ContainerStarted","Data":"457c7b7e07ab74a78384b81b1eb10e86b0d941411873d2d1712dc4ca57594922"} Jan 22 00:22:13 crc kubenswrapper[4829]: I0122 00:22:13.855190 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-1-build_67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b/docker-build/0.log" Jan 22 00:22:13 crc kubenswrapper[4829]: I0122 00:22:13.856424 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:22:13 crc kubenswrapper[4829]: I0122 00:22:13.943965 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-container-storage-run\") pod \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " Jan 22 00:22:13 crc kubenswrapper[4829]: I0122 00:22:13.944043 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4f6cb\" (UniqueName: \"kubernetes.io/projected/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-kube-api-access-4f6cb\") pod \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " Jan 22 00:22:13 crc kubenswrapper[4829]: I0122 00:22:13.944083 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-buildworkdir\") pod \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " Jan 22 00:22:13 crc kubenswrapper[4829]: I0122 00:22:13.944121 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-build-blob-cache\") pod \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " Jan 22 00:22:13 crc kubenswrapper[4829]: I0122 00:22:13.944145 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-build-system-configs\") pod \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " Jan 22 00:22:13 crc kubenswrapper[4829]: I0122 00:22:13.944217 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-buildcachedir\") pod \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " Jan 22 00:22:13 crc kubenswrapper[4829]: I0122 00:22:13.944270 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-builder-dockercfg-ll9bh-push\") pod \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " Jan 22 00:22:13 crc kubenswrapper[4829]: I0122 00:22:13.944306 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-node-pullsecrets\") pod \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " Jan 22 00:22:13 crc kubenswrapper[4829]: I0122 00:22:13.944386 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-build-ca-bundles\") pod \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " Jan 22 00:22:13 crc kubenswrapper[4829]: I0122 00:22:13.944437 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: 
\"kubernetes.io/empty-dir/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-container-storage-root\") pod \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " Jan 22 00:22:13 crc kubenswrapper[4829]: I0122 00:22:13.944491 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-builder-dockercfg-ll9bh-pull\") pod \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " Jan 22 00:22:13 crc kubenswrapper[4829]: I0122 00:22:13.944569 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-build-proxy-ca-bundles\") pod \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " Jan 22 00:22:13 crc kubenswrapper[4829]: I0122 00:22:13.944796 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b" (UID: "67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:22:13 crc kubenswrapper[4829]: I0122 00:22:13.944858 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b" (UID: "67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:22:13 crc kubenswrapper[4829]: I0122 00:22:13.945012 4829 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-buildworkdir\") on node \"crc\" DevicePath \"\"" Jan 22 00:22:13 crc kubenswrapper[4829]: I0122 00:22:13.945055 4829 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Jan 22 00:22:13 crc kubenswrapper[4829]: I0122 00:22:13.945639 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b" (UID: "67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:22:13 crc kubenswrapper[4829]: I0122 00:22:13.945638 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b" (UID: "67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b"). InnerVolumeSpecName "buildcachedir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:22:13 crc kubenswrapper[4829]: I0122 00:22:13.945726 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b" (UID: "67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:22:13 crc kubenswrapper[4829]: I0122 00:22:13.945798 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b" (UID: "67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:22:13 crc kubenswrapper[4829]: I0122 00:22:13.946273 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b" (UID: "67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:22:13 crc kubenswrapper[4829]: I0122 00:22:13.951504 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-builder-dockercfg-ll9bh-push" (OuterVolumeSpecName: "builder-dockercfg-ll9bh-push") pod "67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b" (UID: "67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b"). InnerVolumeSpecName "builder-dockercfg-ll9bh-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:22:13 crc kubenswrapper[4829]: I0122 00:22:13.952528 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-builder-dockercfg-ll9bh-pull" (OuterVolumeSpecName: "builder-dockercfg-ll9bh-pull") pod "67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b" (UID: "67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b"). InnerVolumeSpecName "builder-dockercfg-ll9bh-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:22:13 crc kubenswrapper[4829]: I0122 00:22:13.952564 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-kube-api-access-4f6cb" (OuterVolumeSpecName: "kube-api-access-4f6cb") pod "67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b" (UID: "67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b"). InnerVolumeSpecName "kube-api-access-4f6cb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:22:14 crc kubenswrapper[4829]: I0122 00:22:14.046841 4829 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:22:14 crc kubenswrapper[4829]: I0122 00:22:14.046937 4829 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-container-storage-run\") on node \"crc\" DevicePath \"\"" Jan 22 00:22:14 crc kubenswrapper[4829]: I0122 00:22:14.046948 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4f6cb\" (UniqueName: \"kubernetes.io/projected/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-kube-api-access-4f6cb\") on node \"crc\" DevicePath \"\"" Jan 22 00:22:14 crc kubenswrapper[4829]: I0122 00:22:14.046959 4829 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-build-system-configs\") on node \"crc\" DevicePath \"\"" Jan 22 00:22:14 crc kubenswrapper[4829]: I0122 00:22:14.046968 4829 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-buildcachedir\") on node \"crc\" DevicePath \"\"" Jan 22 00:22:14 crc kubenswrapper[4829]: I0122 00:22:14.046977 4829 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-builder-dockercfg-ll9bh-push\") on node \"crc\" DevicePath \"\"" Jan 22 00:22:14 crc kubenswrapper[4829]: I0122 00:22:14.046987 4829 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:22:14 crc kubenswrapper[4829]: I0122 00:22:14.046996 4829 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-builder-dockercfg-ll9bh-pull\") on node \"crc\" DevicePath \"\"" Jan 22 00:22:14 crc kubenswrapper[4829]: I0122 00:22:14.351503 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b" (UID: "67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b"). InnerVolumeSpecName "build-blob-cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:22:14 crc kubenswrapper[4829]: I0122 00:22:14.352267 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-build-blob-cache\") pod \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\" (UID: \"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b\") " Jan 22 00:22:14 crc kubenswrapper[4829]: W0122 00:22:14.352650 4829 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b/volumes/kubernetes.io~empty-dir/build-blob-cache Jan 22 00:22:14 crc kubenswrapper[4829]: I0122 00:22:14.352757 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b" (UID: "67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:22:14 crc kubenswrapper[4829]: I0122 00:22:14.430802 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-1-build_67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b/docker-build/0.log" Jan 22 00:22:14 crc kubenswrapper[4829]: I0122 00:22:14.431273 4829 generic.go:334] "Generic (PLEG): container finished" podID="67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b" containerID="16679620436c6e67549648c8164ef715c93413c8050860f1b83645f6b1711efe" exitCode=1 Jan 22 00:22:14 crc kubenswrapper[4829]: I0122 00:22:14.431329 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-1-build" event={"ID":"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b","Type":"ContainerDied","Data":"16679620436c6e67549648c8164ef715c93413c8050860f1b83645f6b1711efe"} Jan 22 00:22:14 crc kubenswrapper[4829]: I0122 00:22:14.431380 4829 scope.go:117] "RemoveContainer" containerID="16679620436c6e67549648c8164ef715c93413c8050860f1b83645f6b1711efe" Jan 22 00:22:14 crc kubenswrapper[4829]: I0122 00:22:14.453441 4829 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-build-blob-cache\") on node \"crc\" DevicePath \"\"" Jan 22 00:22:14 crc kubenswrapper[4829]: I0122 00:22:14.459267 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b" (UID: "67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b"). InnerVolumeSpecName "container-storage-root". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:22:14 crc kubenswrapper[4829]: I0122 00:22:14.483359 4829 scope.go:117] "RemoveContainer" containerID="fbf0f9e930c163c507fc1eec7b10855e034bd7cc67552ff1736b539625408f36" Jan 22 00:22:14 crc kubenswrapper[4829]: I0122 00:22:14.554365 4829 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b-container-storage-root\") on node \"crc\" DevicePath \"\"" Jan 22 00:22:15 crc kubenswrapper[4829]: I0122 00:22:15.443636 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" event={"ID":"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d","Type":"ContainerStarted","Data":"5c8df8a3cd598257b332a1ab46a9fcaadf2727c3bfe718f446727d7d4c6b87b1"} Jan 22 00:22:15 crc kubenswrapper[4829]: I0122 00:22:15.445441 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-1-build" event={"ID":"67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b","Type":"ContainerDied","Data":"e2aa31daa78942fa724e8d4488ee6d42cca5a2bcf615c919dcea6728c3c68389"} Jan 22 00:22:15 crc kubenswrapper[4829]: I0122 00:22:15.445484 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-1-build" Jan 22 00:22:15 crc kubenswrapper[4829]: I0122 00:22:15.492300 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/smart-gateway-operator-1-build"] Jan 22 00:22:15 crc kubenswrapper[4829]: I0122 00:22:15.500402 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/smart-gateway-operator-1-build"] Jan 22 00:22:16 crc kubenswrapper[4829]: I0122 00:22:16.453419 4829 generic.go:334] "Generic (PLEG): container finished" podID="b8fd44cb-8191-4b46-8260-1bc8aa31fe6d" containerID="5c8df8a3cd598257b332a1ab46a9fcaadf2727c3bfe718f446727d7d4c6b87b1" exitCode=0 Jan 22 00:22:16 crc kubenswrapper[4829]: I0122 00:22:16.453459 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" event={"ID":"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d","Type":"ContainerDied","Data":"5c8df8a3cd598257b332a1ab46a9fcaadf2727c3bfe718f446727d7d4c6b87b1"} Jan 22 00:22:16 crc kubenswrapper[4829]: I0122 00:22:16.563712 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b" path="/var/lib/kubelet/pods/67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b/volumes" Jan 22 00:22:17 crc kubenswrapper[4829]: I0122 00:22:17.461754 4829 generic.go:334] "Generic (PLEG): container finished" podID="b8fd44cb-8191-4b46-8260-1bc8aa31fe6d" containerID="6188ea26c0a852486938d9ef2909cb9c4f7e4afd99ad2892e1a59a6927549396" exitCode=0 Jan 22 00:22:17 crc kubenswrapper[4829]: I0122 00:22:17.461824 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" event={"ID":"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d","Type":"ContainerDied","Data":"6188ea26c0a852486938d9ef2909cb9c4f7e4afd99ad2892e1a59a6927549396"} Jan 22 00:22:17 crc kubenswrapper[4829]: I0122 00:22:17.508659 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-2-build_b8fd44cb-8191-4b46-8260-1bc8aa31fe6d/manage-dockerfile/0.log" Jan 22 00:22:18 crc kubenswrapper[4829]: I0122 00:22:18.471328 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" 
event={"ID":"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d","Type":"ContainerStarted","Data":"a5391031aecbd2e04332688be98c04846b4cc8ef10587dcb48f070b905366897"} Jan 22 00:22:18 crc kubenswrapper[4829]: I0122 00:22:18.498258 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/smart-gateway-operator-2-build" podStartSLOduration=11.498239881 podStartE2EDuration="11.498239881s" podCreationTimestamp="2026-01-22 00:22:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:22:18.494831442 +0000 UTC m=+916.531073354" watchObservedRunningTime="2026-01-22 00:22:18.498239881 +0000 UTC m=+916.534481793" Jan 22 00:22:34 crc kubenswrapper[4829]: I0122 00:22:34.658732 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:22:34 crc kubenswrapper[4829]: I0122 00:22:34.659593 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:22:40 crc kubenswrapper[4829]: I0122 00:22:40.036434 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vg52v"] Jan 22 00:22:40 crc kubenswrapper[4829]: E0122 00:22:40.037261 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b" containerName="docker-build" Jan 22 00:22:40 crc kubenswrapper[4829]: I0122 00:22:40.037280 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b" containerName="docker-build" Jan 22 00:22:40 crc kubenswrapper[4829]: E0122 00:22:40.037299 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b" containerName="manage-dockerfile" Jan 22 00:22:40 crc kubenswrapper[4829]: I0122 00:22:40.037307 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b" containerName="manage-dockerfile" Jan 22 00:22:40 crc kubenswrapper[4829]: I0122 00:22:40.037466 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="67ee7f3e-18f2-4943-a3e7-9f0466aa0d4b" containerName="docker-build" Jan 22 00:22:40 crc kubenswrapper[4829]: I0122 00:22:40.038501 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vg52v" Jan 22 00:22:40 crc kubenswrapper[4829]: I0122 00:22:40.047762 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vg52v"] Jan 22 00:22:40 crc kubenswrapper[4829]: I0122 00:22:40.219019 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crhf8\" (UniqueName: \"kubernetes.io/projected/9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4-kube-api-access-crhf8\") pod \"community-operators-vg52v\" (UID: \"9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4\") " pod="openshift-marketplace/community-operators-vg52v" Jan 22 00:22:40 crc kubenswrapper[4829]: I0122 00:22:40.219074 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4-catalog-content\") pod \"community-operators-vg52v\" (UID: \"9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4\") " pod="openshift-marketplace/community-operators-vg52v" Jan 22 00:22:40 crc kubenswrapper[4829]: I0122 00:22:40.219328 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4-utilities\") pod \"community-operators-vg52v\" (UID: \"9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4\") " pod="openshift-marketplace/community-operators-vg52v" Jan 22 00:22:40 crc kubenswrapper[4829]: I0122 00:22:40.320786 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crhf8\" (UniqueName: \"kubernetes.io/projected/9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4-kube-api-access-crhf8\") pod \"community-operators-vg52v\" (UID: \"9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4\") " pod="openshift-marketplace/community-operators-vg52v" Jan 22 00:22:40 crc kubenswrapper[4829]: I0122 00:22:40.320853 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4-catalog-content\") pod \"community-operators-vg52v\" (UID: \"9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4\") " pod="openshift-marketplace/community-operators-vg52v" Jan 22 00:22:40 crc kubenswrapper[4829]: I0122 00:22:40.320888 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4-utilities\") pod \"community-operators-vg52v\" (UID: \"9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4\") " pod="openshift-marketplace/community-operators-vg52v" Jan 22 00:22:40 crc kubenswrapper[4829]: I0122 00:22:40.321609 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4-catalog-content\") pod \"community-operators-vg52v\" (UID: \"9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4\") " pod="openshift-marketplace/community-operators-vg52v" Jan 22 00:22:40 crc kubenswrapper[4829]: I0122 00:22:40.321647 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4-utilities\") pod \"community-operators-vg52v\" (UID: \"9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4\") " pod="openshift-marketplace/community-operators-vg52v" Jan 22 00:22:40 crc kubenswrapper[4829]: I0122 00:22:40.344109 4829 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-crhf8\" (UniqueName: \"kubernetes.io/projected/9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4-kube-api-access-crhf8\") pod \"community-operators-vg52v\" (UID: \"9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4\") " pod="openshift-marketplace/community-operators-vg52v" Jan 22 00:22:40 crc kubenswrapper[4829]: I0122 00:22:40.403383 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vg52v" Jan 22 00:22:40 crc kubenswrapper[4829]: I0122 00:22:40.656895 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vg52v"] Jan 22 00:22:40 crc kubenswrapper[4829]: I0122 00:22:40.855066 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vg52v" event={"ID":"9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4","Type":"ContainerStarted","Data":"ceac0871e3ff0105562fc41c2e19589ccd8788055a36c533be010ef9108a559a"} Jan 22 00:22:40 crc kubenswrapper[4829]: I0122 00:22:40.855136 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vg52v" event={"ID":"9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4","Type":"ContainerStarted","Data":"b1345ebf7d3581a90ef45ea4d1cc404ef4c62444af302a98c0e7a0a2a041adcc"} Jan 22 00:22:41 crc kubenswrapper[4829]: I0122 00:22:41.862069 4829 generic.go:334] "Generic (PLEG): container finished" podID="9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4" containerID="ceac0871e3ff0105562fc41c2e19589ccd8788055a36c533be010ef9108a559a" exitCode=0 Jan 22 00:22:41 crc kubenswrapper[4829]: I0122 00:22:41.862170 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vg52v" event={"ID":"9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4","Type":"ContainerDied","Data":"ceac0871e3ff0105562fc41c2e19589ccd8788055a36c533be010ef9108a559a"} Jan 22 00:22:42 crc kubenswrapper[4829]: I0122 00:22:42.868494 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vg52v" event={"ID":"9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4","Type":"ContainerStarted","Data":"f48de93f4c5944915666dd71877fda1b9ae12fe08d9b004f4e575ad18cc0a4ba"} Jan 22 00:22:43 crc kubenswrapper[4829]: I0122 00:22:43.874833 4829 generic.go:334] "Generic (PLEG): container finished" podID="9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4" containerID="f48de93f4c5944915666dd71877fda1b9ae12fe08d9b004f4e575ad18cc0a4ba" exitCode=0 Jan 22 00:22:43 crc kubenswrapper[4829]: I0122 00:22:43.874869 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vg52v" event={"ID":"9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4","Type":"ContainerDied","Data":"f48de93f4c5944915666dd71877fda1b9ae12fe08d9b004f4e575ad18cc0a4ba"} Jan 22 00:22:44 crc kubenswrapper[4829]: I0122 00:22:44.883534 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vg52v" event={"ID":"9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4","Type":"ContainerStarted","Data":"321865f9d0bccdf5bac81019d8df37954a2b2a3d9cb9f64ca29b85adea827061"} Jan 22 00:22:44 crc kubenswrapper[4829]: I0122 00:22:44.906864 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vg52v" podStartSLOduration=2.490199898 podStartE2EDuration="4.906839817s" podCreationTimestamp="2026-01-22 00:22:40 +0000 UTC" firstStartedPulling="2026-01-22 00:22:41.864623241 +0000 UTC m=+939.900865163" lastFinishedPulling="2026-01-22 
00:22:44.28126313 +0000 UTC m=+942.317505082" observedRunningTime="2026-01-22 00:22:44.902611852 +0000 UTC m=+942.938853764" watchObservedRunningTime="2026-01-22 00:22:44.906839817 +0000 UTC m=+942.943081769" Jan 22 00:22:48 crc kubenswrapper[4829]: I0122 00:22:48.108266 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6lcpv"] Jan 22 00:22:48 crc kubenswrapper[4829]: I0122 00:22:48.110025 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6lcpv" Jan 22 00:22:48 crc kubenswrapper[4829]: I0122 00:22:48.114454 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6lcpv"] Jan 22 00:22:48 crc kubenswrapper[4829]: I0122 00:22:48.219820 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd24e27d-d91d-4682-9a16-2b9fdc3856b9-catalog-content\") pod \"certified-operators-6lcpv\" (UID: \"bd24e27d-d91d-4682-9a16-2b9fdc3856b9\") " pod="openshift-marketplace/certified-operators-6lcpv" Jan 22 00:22:48 crc kubenswrapper[4829]: I0122 00:22:48.220171 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nw6hb\" (UniqueName: \"kubernetes.io/projected/bd24e27d-d91d-4682-9a16-2b9fdc3856b9-kube-api-access-nw6hb\") pod \"certified-operators-6lcpv\" (UID: \"bd24e27d-d91d-4682-9a16-2b9fdc3856b9\") " pod="openshift-marketplace/certified-operators-6lcpv" Jan 22 00:22:48 crc kubenswrapper[4829]: I0122 00:22:48.220199 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd24e27d-d91d-4682-9a16-2b9fdc3856b9-utilities\") pod \"certified-operators-6lcpv\" (UID: \"bd24e27d-d91d-4682-9a16-2b9fdc3856b9\") " pod="openshift-marketplace/certified-operators-6lcpv" Jan 22 00:22:48 crc kubenswrapper[4829]: I0122 00:22:48.321962 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd24e27d-d91d-4682-9a16-2b9fdc3856b9-utilities\") pod \"certified-operators-6lcpv\" (UID: \"bd24e27d-d91d-4682-9a16-2b9fdc3856b9\") " pod="openshift-marketplace/certified-operators-6lcpv" Jan 22 00:22:48 crc kubenswrapper[4829]: I0122 00:22:48.322144 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd24e27d-d91d-4682-9a16-2b9fdc3856b9-catalog-content\") pod \"certified-operators-6lcpv\" (UID: \"bd24e27d-d91d-4682-9a16-2b9fdc3856b9\") " pod="openshift-marketplace/certified-operators-6lcpv" Jan 22 00:22:48 crc kubenswrapper[4829]: I0122 00:22:48.322192 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nw6hb\" (UniqueName: \"kubernetes.io/projected/bd24e27d-d91d-4682-9a16-2b9fdc3856b9-kube-api-access-nw6hb\") pod \"certified-operators-6lcpv\" (UID: \"bd24e27d-d91d-4682-9a16-2b9fdc3856b9\") " pod="openshift-marketplace/certified-operators-6lcpv" Jan 22 00:22:48 crc kubenswrapper[4829]: I0122 00:22:48.322599 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd24e27d-d91d-4682-9a16-2b9fdc3856b9-utilities\") pod \"certified-operators-6lcpv\" (UID: \"bd24e27d-d91d-4682-9a16-2b9fdc3856b9\") " 
pod="openshift-marketplace/certified-operators-6lcpv" Jan 22 00:22:48 crc kubenswrapper[4829]: I0122 00:22:48.322610 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd24e27d-d91d-4682-9a16-2b9fdc3856b9-catalog-content\") pod \"certified-operators-6lcpv\" (UID: \"bd24e27d-d91d-4682-9a16-2b9fdc3856b9\") " pod="openshift-marketplace/certified-operators-6lcpv" Jan 22 00:22:48 crc kubenswrapper[4829]: I0122 00:22:48.349623 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nw6hb\" (UniqueName: \"kubernetes.io/projected/bd24e27d-d91d-4682-9a16-2b9fdc3856b9-kube-api-access-nw6hb\") pod \"certified-operators-6lcpv\" (UID: \"bd24e27d-d91d-4682-9a16-2b9fdc3856b9\") " pod="openshift-marketplace/certified-operators-6lcpv" Jan 22 00:22:48 crc kubenswrapper[4829]: I0122 00:22:48.429042 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6lcpv" Jan 22 00:22:48 crc kubenswrapper[4829]: I0122 00:22:48.724586 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6lcpv"] Jan 22 00:22:48 crc kubenswrapper[4829]: W0122 00:22:48.730888 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbd24e27d_d91d_4682_9a16_2b9fdc3856b9.slice/crio-c59fd680c82a2fef45b87f7d8703dd5365c40e7c815238ce637b1fd1047cef33 WatchSource:0}: Error finding container c59fd680c82a2fef45b87f7d8703dd5365c40e7c815238ce637b1fd1047cef33: Status 404 returned error can't find the container with id c59fd680c82a2fef45b87f7d8703dd5365c40e7c815238ce637b1fd1047cef33 Jan 22 00:22:48 crc kubenswrapper[4829]: I0122 00:22:48.909842 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6lcpv" event={"ID":"bd24e27d-d91d-4682-9a16-2b9fdc3856b9","Type":"ContainerStarted","Data":"c59fd680c82a2fef45b87f7d8703dd5365c40e7c815238ce637b1fd1047cef33"} Jan 22 00:22:49 crc kubenswrapper[4829]: I0122 00:22:49.917925 4829 generic.go:334] "Generic (PLEG): container finished" podID="bd24e27d-d91d-4682-9a16-2b9fdc3856b9" containerID="5197a7098073e70c7a4a3883f4793595c64eb98f7c08d62d501d8e359bea2a6e" exitCode=0 Jan 22 00:22:49 crc kubenswrapper[4829]: I0122 00:22:49.918022 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6lcpv" event={"ID":"bd24e27d-d91d-4682-9a16-2b9fdc3856b9","Type":"ContainerDied","Data":"5197a7098073e70c7a4a3883f4793595c64eb98f7c08d62d501d8e359bea2a6e"} Jan 22 00:22:50 crc kubenswrapper[4829]: I0122 00:22:50.404231 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vg52v" Jan 22 00:22:50 crc kubenswrapper[4829]: I0122 00:22:50.404976 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vg52v" Jan 22 00:22:50 crc kubenswrapper[4829]: I0122 00:22:50.450892 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vg52v" Jan 22 00:22:50 crc kubenswrapper[4829]: I0122 00:22:50.961713 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vg52v" Jan 22 00:22:52 crc kubenswrapper[4829]: I0122 00:22:52.608110 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/community-operators-vg52v"] Jan 22 00:22:53 crc kubenswrapper[4829]: I0122 00:22:53.945951 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vg52v" podUID="9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4" containerName="registry-server" containerID="cri-o://321865f9d0bccdf5bac81019d8df37954a2b2a3d9cb9f64ca29b85adea827061" gracePeriod=2 Jan 22 00:22:54 crc kubenswrapper[4829]: I0122 00:22:54.965205 4829 generic.go:334] "Generic (PLEG): container finished" podID="9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4" containerID="321865f9d0bccdf5bac81019d8df37954a2b2a3d9cb9f64ca29b85adea827061" exitCode=0 Jan 22 00:22:54 crc kubenswrapper[4829]: I0122 00:22:54.965286 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vg52v" event={"ID":"9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4","Type":"ContainerDied","Data":"321865f9d0bccdf5bac81019d8df37954a2b2a3d9cb9f64ca29b85adea827061"} Jan 22 00:22:54 crc kubenswrapper[4829]: I0122 00:22:54.968053 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6lcpv" event={"ID":"bd24e27d-d91d-4682-9a16-2b9fdc3856b9","Type":"ContainerStarted","Data":"dd2af77b8775e25a278fb8e93bbb2b33662535ab09a62cf1296a27c05d3cfb09"} Jan 22 00:22:55 crc kubenswrapper[4829]: I0122 00:22:55.034727 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vg52v" Jan 22 00:22:55 crc kubenswrapper[4829]: I0122 00:22:55.227582 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-crhf8\" (UniqueName: \"kubernetes.io/projected/9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4-kube-api-access-crhf8\") pod \"9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4\" (UID: \"9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4\") " Jan 22 00:22:55 crc kubenswrapper[4829]: I0122 00:22:55.227669 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4-utilities\") pod \"9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4\" (UID: \"9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4\") " Jan 22 00:22:55 crc kubenswrapper[4829]: I0122 00:22:55.227782 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4-catalog-content\") pod \"9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4\" (UID: \"9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4\") " Jan 22 00:22:55 crc kubenswrapper[4829]: I0122 00:22:55.228856 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4-utilities" (OuterVolumeSpecName: "utilities") pod "9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4" (UID: "9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:22:55 crc kubenswrapper[4829]: I0122 00:22:55.234829 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4-kube-api-access-crhf8" (OuterVolumeSpecName: "kube-api-access-crhf8") pod "9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4" (UID: "9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4"). InnerVolumeSpecName "kube-api-access-crhf8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:22:55 crc kubenswrapper[4829]: I0122 00:22:55.289528 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4" (UID: "9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:22:55 crc kubenswrapper[4829]: I0122 00:22:55.329017 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-crhf8\" (UniqueName: \"kubernetes.io/projected/9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4-kube-api-access-crhf8\") on node \"crc\" DevicePath \"\"" Jan 22 00:22:55 crc kubenswrapper[4829]: I0122 00:22:55.329075 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 00:22:55 crc kubenswrapper[4829]: I0122 00:22:55.329089 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 00:22:55 crc kubenswrapper[4829]: I0122 00:22:55.977108 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vg52v" event={"ID":"9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4","Type":"ContainerDied","Data":"b1345ebf7d3581a90ef45ea4d1cc404ef4c62444af302a98c0e7a0a2a041adcc"} Jan 22 00:22:55 crc kubenswrapper[4829]: I0122 00:22:55.977439 4829 scope.go:117] "RemoveContainer" containerID="321865f9d0bccdf5bac81019d8df37954a2b2a3d9cb9f64ca29b85adea827061" Jan 22 00:22:55 crc kubenswrapper[4829]: I0122 00:22:55.977630 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vg52v" Jan 22 00:22:55 crc kubenswrapper[4829]: I0122 00:22:55.993893 4829 generic.go:334] "Generic (PLEG): container finished" podID="bd24e27d-d91d-4682-9a16-2b9fdc3856b9" containerID="dd2af77b8775e25a278fb8e93bbb2b33662535ab09a62cf1296a27c05d3cfb09" exitCode=0 Jan 22 00:22:55 crc kubenswrapper[4829]: I0122 00:22:55.993933 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6lcpv" event={"ID":"bd24e27d-d91d-4682-9a16-2b9fdc3856b9","Type":"ContainerDied","Data":"dd2af77b8775e25a278fb8e93bbb2b33662535ab09a62cf1296a27c05d3cfb09"} Jan 22 00:22:56 crc kubenswrapper[4829]: I0122 00:22:56.010565 4829 scope.go:117] "RemoveContainer" containerID="f48de93f4c5944915666dd71877fda1b9ae12fe08d9b004f4e575ad18cc0a4ba" Jan 22 00:22:56 crc kubenswrapper[4829]: I0122 00:22:56.034565 4829 scope.go:117] "RemoveContainer" containerID="ceac0871e3ff0105562fc41c2e19589ccd8788055a36c533be010ef9108a559a" Jan 22 00:22:56 crc kubenswrapper[4829]: I0122 00:22:56.039123 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vg52v"] Jan 22 00:22:56 crc kubenswrapper[4829]: I0122 00:22:56.042708 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vg52v"] Jan 22 00:22:56 crc kubenswrapper[4829]: I0122 00:22:56.561721 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4" path="/var/lib/kubelet/pods/9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4/volumes" Jan 22 00:22:57 crc kubenswrapper[4829]: I0122 00:22:57.006184 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6lcpv" event={"ID":"bd24e27d-d91d-4682-9a16-2b9fdc3856b9","Type":"ContainerStarted","Data":"596873a555fdb489d71e63d29a1874ba13f2588a02a8366840870f36b5177733"} Jan 22 00:22:58 crc kubenswrapper[4829]: I0122 00:22:58.430176 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6lcpv" Jan 22 00:22:58 crc kubenswrapper[4829]: I0122 00:22:58.431704 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6lcpv" Jan 22 00:22:59 crc kubenswrapper[4829]: I0122 00:22:59.470448 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-6lcpv" podUID="bd24e27d-d91d-4682-9a16-2b9fdc3856b9" containerName="registry-server" probeResult="failure" output=< Jan 22 00:22:59 crc kubenswrapper[4829]: timeout: failed to connect service ":50051" within 1s Jan 22 00:22:59 crc kubenswrapper[4829]: > Jan 22 00:23:04 crc kubenswrapper[4829]: I0122 00:23:04.661744 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:23:04 crc kubenswrapper[4829]: I0122 00:23:04.662377 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:23:04 crc kubenswrapper[4829]: I0122 00:23:04.662441 4829 
kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 00:23:04 crc kubenswrapper[4829]: I0122 00:23:04.663151 4829 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3fcfdecd12f2ea6bd608be28228c0387268ca752f7a44968b5bed6691aacbbf6"} pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 00:23:04 crc kubenswrapper[4829]: I0122 00:23:04.663207 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" containerID="cri-o://3fcfdecd12f2ea6bd608be28228c0387268ca752f7a44968b5bed6691aacbbf6" gracePeriod=600 Jan 22 00:23:05 crc kubenswrapper[4829]: E0122 00:23:05.587460 4829 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf42b723d_cbe5_4bc3_8b03_f1d30d26c8fc.slice/crio-conmon-3fcfdecd12f2ea6bd608be28228c0387268ca752f7a44968b5bed6691aacbbf6.scope\": RecentStats: unable to find data in memory cache]" Jan 22 00:23:06 crc kubenswrapper[4829]: I0122 00:23:06.069827 4829 generic.go:334] "Generic (PLEG): container finished" podID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerID="3fcfdecd12f2ea6bd608be28228c0387268ca752f7a44968b5bed6691aacbbf6" exitCode=0 Jan 22 00:23:06 crc kubenswrapper[4829]: I0122 00:23:06.070011 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerDied","Data":"3fcfdecd12f2ea6bd608be28228c0387268ca752f7a44968b5bed6691aacbbf6"} Jan 22 00:23:06 crc kubenswrapper[4829]: I0122 00:23:06.070246 4829 scope.go:117] "RemoveContainer" containerID="1a6767ee33d3f8536c661de53fec59c45e35b5827099605c67df4196857f9939" Jan 22 00:23:07 crc kubenswrapper[4829]: I0122 00:23:07.079910 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerStarted","Data":"0483a801a05aaf6e880f016f24ef4b653fbb05a78257dcb329328e91fdd090fc"} Jan 22 00:23:07 crc kubenswrapper[4829]: I0122 00:23:07.101577 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6lcpv" podStartSLOduration=12.412784906 podStartE2EDuration="19.101556526s" podCreationTimestamp="2026-01-22 00:22:48 +0000 UTC" firstStartedPulling="2026-01-22 00:22:49.921325896 +0000 UTC m=+947.957567818" lastFinishedPulling="2026-01-22 00:22:56.610097516 +0000 UTC m=+954.646339438" observedRunningTime="2026-01-22 00:22:57.032167011 +0000 UTC m=+955.068408933" watchObservedRunningTime="2026-01-22 00:23:07.101556526 +0000 UTC m=+965.137798458" Jan 22 00:23:08 crc kubenswrapper[4829]: I0122 00:23:08.500033 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6lcpv" Jan 22 00:23:08 crc kubenswrapper[4829]: I0122 00:23:08.629368 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6lcpv" Jan 22 00:23:08 crc kubenswrapper[4829]: 
I0122 00:23:08.750799 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6lcpv"] Jan 22 00:23:10 crc kubenswrapper[4829]: I0122 00:23:10.098902 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6lcpv" podUID="bd24e27d-d91d-4682-9a16-2b9fdc3856b9" containerName="registry-server" containerID="cri-o://596873a555fdb489d71e63d29a1874ba13f2588a02a8366840870f36b5177733" gracePeriod=2 Jan 22 00:23:11 crc kubenswrapper[4829]: I0122 00:23:11.107735 4829 generic.go:334] "Generic (PLEG): container finished" podID="bd24e27d-d91d-4682-9a16-2b9fdc3856b9" containerID="596873a555fdb489d71e63d29a1874ba13f2588a02a8366840870f36b5177733" exitCode=0 Jan 22 00:23:11 crc kubenswrapper[4829]: I0122 00:23:11.107815 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6lcpv" event={"ID":"bd24e27d-d91d-4682-9a16-2b9fdc3856b9","Type":"ContainerDied","Data":"596873a555fdb489d71e63d29a1874ba13f2588a02a8366840870f36b5177733"} Jan 22 00:23:11 crc kubenswrapper[4829]: I0122 00:23:11.164502 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6lcpv" Jan 22 00:23:11 crc kubenswrapper[4829]: I0122 00:23:11.277032 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd24e27d-d91d-4682-9a16-2b9fdc3856b9-catalog-content\") pod \"bd24e27d-d91d-4682-9a16-2b9fdc3856b9\" (UID: \"bd24e27d-d91d-4682-9a16-2b9fdc3856b9\") " Jan 22 00:23:11 crc kubenswrapper[4829]: I0122 00:23:11.277580 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nw6hb\" (UniqueName: \"kubernetes.io/projected/bd24e27d-d91d-4682-9a16-2b9fdc3856b9-kube-api-access-nw6hb\") pod \"bd24e27d-d91d-4682-9a16-2b9fdc3856b9\" (UID: \"bd24e27d-d91d-4682-9a16-2b9fdc3856b9\") " Jan 22 00:23:11 crc kubenswrapper[4829]: I0122 00:23:11.277634 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd24e27d-d91d-4682-9a16-2b9fdc3856b9-utilities\") pod \"bd24e27d-d91d-4682-9a16-2b9fdc3856b9\" (UID: \"bd24e27d-d91d-4682-9a16-2b9fdc3856b9\") " Jan 22 00:23:11 crc kubenswrapper[4829]: I0122 00:23:11.278487 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd24e27d-d91d-4682-9a16-2b9fdc3856b9-utilities" (OuterVolumeSpecName: "utilities") pod "bd24e27d-d91d-4682-9a16-2b9fdc3856b9" (UID: "bd24e27d-d91d-4682-9a16-2b9fdc3856b9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:23:11 crc kubenswrapper[4829]: I0122 00:23:11.283977 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd24e27d-d91d-4682-9a16-2b9fdc3856b9-kube-api-access-nw6hb" (OuterVolumeSpecName: "kube-api-access-nw6hb") pod "bd24e27d-d91d-4682-9a16-2b9fdc3856b9" (UID: "bd24e27d-d91d-4682-9a16-2b9fdc3856b9"). InnerVolumeSpecName "kube-api-access-nw6hb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:23:11 crc kubenswrapper[4829]: I0122 00:23:11.341616 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd24e27d-d91d-4682-9a16-2b9fdc3856b9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bd24e27d-d91d-4682-9a16-2b9fdc3856b9" (UID: "bd24e27d-d91d-4682-9a16-2b9fdc3856b9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:23:11 crc kubenswrapper[4829]: I0122 00:23:11.379204 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nw6hb\" (UniqueName: \"kubernetes.io/projected/bd24e27d-d91d-4682-9a16-2b9fdc3856b9-kube-api-access-nw6hb\") on node \"crc\" DevicePath \"\"" Jan 22 00:23:11 crc kubenswrapper[4829]: I0122 00:23:11.379251 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd24e27d-d91d-4682-9a16-2b9fdc3856b9-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 00:23:11 crc kubenswrapper[4829]: I0122 00:23:11.379265 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd24e27d-d91d-4682-9a16-2b9fdc3856b9-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 00:23:11 crc kubenswrapper[4829]: I0122 00:23:11.954164 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pbnmv"] Jan 22 00:23:11 crc kubenswrapper[4829]: E0122 00:23:11.954448 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4" containerName="registry-server" Jan 22 00:23:11 crc kubenswrapper[4829]: I0122 00:23:11.954464 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4" containerName="registry-server" Jan 22 00:23:11 crc kubenswrapper[4829]: E0122 00:23:11.954484 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4" containerName="extract-content" Jan 22 00:23:11 crc kubenswrapper[4829]: I0122 00:23:11.954492 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4" containerName="extract-content" Jan 22 00:23:11 crc kubenswrapper[4829]: E0122 00:23:11.954505 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd24e27d-d91d-4682-9a16-2b9fdc3856b9" containerName="registry-server" Jan 22 00:23:11 crc kubenswrapper[4829]: I0122 00:23:11.954514 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd24e27d-d91d-4682-9a16-2b9fdc3856b9" containerName="registry-server" Jan 22 00:23:11 crc kubenswrapper[4829]: E0122 00:23:11.954526 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4" containerName="extract-utilities" Jan 22 00:23:11 crc kubenswrapper[4829]: I0122 00:23:11.954534 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4" containerName="extract-utilities" Jan 22 00:23:11 crc kubenswrapper[4829]: E0122 00:23:11.954567 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd24e27d-d91d-4682-9a16-2b9fdc3856b9" containerName="extract-utilities" Jan 22 00:23:11 crc kubenswrapper[4829]: I0122 00:23:11.954575 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd24e27d-d91d-4682-9a16-2b9fdc3856b9" containerName="extract-utilities" Jan 22 00:23:11 crc kubenswrapper[4829]: E0122 00:23:11.954589 4829 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd24e27d-d91d-4682-9a16-2b9fdc3856b9" containerName="extract-content" Jan 22 00:23:11 crc kubenswrapper[4829]: I0122 00:23:11.954599 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd24e27d-d91d-4682-9a16-2b9fdc3856b9" containerName="extract-content" Jan 22 00:23:11 crc kubenswrapper[4829]: I0122 00:23:11.954750 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f8052a6-e0f7-4bd9-ad55-f4fcc3ce08c4" containerName="registry-server" Jan 22 00:23:11 crc kubenswrapper[4829]: I0122 00:23:11.954768 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd24e27d-d91d-4682-9a16-2b9fdc3856b9" containerName="registry-server" Jan 22 00:23:11 crc kubenswrapper[4829]: I0122 00:23:11.955765 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pbnmv" Jan 22 00:23:11 crc kubenswrapper[4829]: I0122 00:23:11.970495 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pbnmv"] Jan 22 00:23:12 crc kubenswrapper[4829]: I0122 00:23:12.089400 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b71e63a-fd51-47c2-aaee-d078155e3569-utilities\") pod \"redhat-operators-pbnmv\" (UID: \"9b71e63a-fd51-47c2-aaee-d078155e3569\") " pod="openshift-marketplace/redhat-operators-pbnmv" Jan 22 00:23:12 crc kubenswrapper[4829]: I0122 00:23:12.089459 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b71e63a-fd51-47c2-aaee-d078155e3569-catalog-content\") pod \"redhat-operators-pbnmv\" (UID: \"9b71e63a-fd51-47c2-aaee-d078155e3569\") " pod="openshift-marketplace/redhat-operators-pbnmv" Jan 22 00:23:12 crc kubenswrapper[4829]: I0122 00:23:12.089493 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2rbw\" (UniqueName: \"kubernetes.io/projected/9b71e63a-fd51-47c2-aaee-d078155e3569-kube-api-access-s2rbw\") pod \"redhat-operators-pbnmv\" (UID: \"9b71e63a-fd51-47c2-aaee-d078155e3569\") " pod="openshift-marketplace/redhat-operators-pbnmv" Jan 22 00:23:12 crc kubenswrapper[4829]: I0122 00:23:12.115552 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6lcpv" event={"ID":"bd24e27d-d91d-4682-9a16-2b9fdc3856b9","Type":"ContainerDied","Data":"c59fd680c82a2fef45b87f7d8703dd5365c40e7c815238ce637b1fd1047cef33"} Jan 22 00:23:12 crc kubenswrapper[4829]: I0122 00:23:12.115626 4829 scope.go:117] "RemoveContainer" containerID="596873a555fdb489d71e63d29a1874ba13f2588a02a8366840870f36b5177733" Jan 22 00:23:12 crc kubenswrapper[4829]: I0122 00:23:12.115643 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6lcpv" Jan 22 00:23:12 crc kubenswrapper[4829]: I0122 00:23:12.135781 4829 scope.go:117] "RemoveContainer" containerID="dd2af77b8775e25a278fb8e93bbb2b33662535ab09a62cf1296a27c05d3cfb09" Jan 22 00:23:12 crc kubenswrapper[4829]: I0122 00:23:12.145516 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6lcpv"] Jan 22 00:23:12 crc kubenswrapper[4829]: I0122 00:23:12.155532 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6lcpv"] Jan 22 00:23:12 crc kubenswrapper[4829]: I0122 00:23:12.163518 4829 scope.go:117] "RemoveContainer" containerID="5197a7098073e70c7a4a3883f4793595c64eb98f7c08d62d501d8e359bea2a6e" Jan 22 00:23:12 crc kubenswrapper[4829]: I0122 00:23:12.190460 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b71e63a-fd51-47c2-aaee-d078155e3569-catalog-content\") pod \"redhat-operators-pbnmv\" (UID: \"9b71e63a-fd51-47c2-aaee-d078155e3569\") " pod="openshift-marketplace/redhat-operators-pbnmv" Jan 22 00:23:12 crc kubenswrapper[4829]: I0122 00:23:12.190521 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2rbw\" (UniqueName: \"kubernetes.io/projected/9b71e63a-fd51-47c2-aaee-d078155e3569-kube-api-access-s2rbw\") pod \"redhat-operators-pbnmv\" (UID: \"9b71e63a-fd51-47c2-aaee-d078155e3569\") " pod="openshift-marketplace/redhat-operators-pbnmv" Jan 22 00:23:12 crc kubenswrapper[4829]: I0122 00:23:12.190607 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b71e63a-fd51-47c2-aaee-d078155e3569-utilities\") pod \"redhat-operators-pbnmv\" (UID: \"9b71e63a-fd51-47c2-aaee-d078155e3569\") " pod="openshift-marketplace/redhat-operators-pbnmv" Jan 22 00:23:12 crc kubenswrapper[4829]: I0122 00:23:12.191094 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b71e63a-fd51-47c2-aaee-d078155e3569-utilities\") pod \"redhat-operators-pbnmv\" (UID: \"9b71e63a-fd51-47c2-aaee-d078155e3569\") " pod="openshift-marketplace/redhat-operators-pbnmv" Jan 22 00:23:12 crc kubenswrapper[4829]: I0122 00:23:12.191329 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b71e63a-fd51-47c2-aaee-d078155e3569-catalog-content\") pod \"redhat-operators-pbnmv\" (UID: \"9b71e63a-fd51-47c2-aaee-d078155e3569\") " pod="openshift-marketplace/redhat-operators-pbnmv" Jan 22 00:23:12 crc kubenswrapper[4829]: I0122 00:23:12.209039 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2rbw\" (UniqueName: \"kubernetes.io/projected/9b71e63a-fd51-47c2-aaee-d078155e3569-kube-api-access-s2rbw\") pod \"redhat-operators-pbnmv\" (UID: \"9b71e63a-fd51-47c2-aaee-d078155e3569\") " pod="openshift-marketplace/redhat-operators-pbnmv" Jan 22 00:23:12 crc kubenswrapper[4829]: I0122 00:23:12.270907 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pbnmv" Jan 22 00:23:12 crc kubenswrapper[4829]: I0122 00:23:12.474987 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pbnmv"] Jan 22 00:23:12 crc kubenswrapper[4829]: I0122 00:23:12.564112 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd24e27d-d91d-4682-9a16-2b9fdc3856b9" path="/var/lib/kubelet/pods/bd24e27d-d91d-4682-9a16-2b9fdc3856b9/volumes" Jan 22 00:23:13 crc kubenswrapper[4829]: I0122 00:23:13.125827 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pbnmv" event={"ID":"9b71e63a-fd51-47c2-aaee-d078155e3569","Type":"ContainerStarted","Data":"7a778b888c791520e8dc517253f9558d8959dc1e27b2db6d5024fc47fd1cf8a7"} Jan 22 00:23:14 crc kubenswrapper[4829]: I0122 00:23:14.134252 4829 generic.go:334] "Generic (PLEG): container finished" podID="9b71e63a-fd51-47c2-aaee-d078155e3569" containerID="cd69d25abe01ca6816b8c0c6d4009231b6cec2f2af3c32f37aaec332b3015bd3" exitCode=0 Jan 22 00:23:14 crc kubenswrapper[4829]: I0122 00:23:14.134293 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pbnmv" event={"ID":"9b71e63a-fd51-47c2-aaee-d078155e3569","Type":"ContainerDied","Data":"cd69d25abe01ca6816b8c0c6d4009231b6cec2f2af3c32f37aaec332b3015bd3"} Jan 22 00:23:14 crc kubenswrapper[4829]: I0122 00:23:14.136043 4829 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 00:23:15 crc kubenswrapper[4829]: I0122 00:23:15.141709 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pbnmv" event={"ID":"9b71e63a-fd51-47c2-aaee-d078155e3569","Type":"ContainerStarted","Data":"0abb2b56235be92574dfece1ac2d5e3bac4a820a8712f2c7d03f0ffc6badd75b"} Jan 22 00:23:16 crc kubenswrapper[4829]: I0122 00:23:16.151242 4829 generic.go:334] "Generic (PLEG): container finished" podID="9b71e63a-fd51-47c2-aaee-d078155e3569" containerID="0abb2b56235be92574dfece1ac2d5e3bac4a820a8712f2c7d03f0ffc6badd75b" exitCode=0 Jan 22 00:23:16 crc kubenswrapper[4829]: I0122 00:23:16.151569 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pbnmv" event={"ID":"9b71e63a-fd51-47c2-aaee-d078155e3569","Type":"ContainerDied","Data":"0abb2b56235be92574dfece1ac2d5e3bac4a820a8712f2c7d03f0ffc6badd75b"} Jan 22 00:23:17 crc kubenswrapper[4829]: I0122 00:23:17.159743 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pbnmv" event={"ID":"9b71e63a-fd51-47c2-aaee-d078155e3569","Type":"ContainerStarted","Data":"c34c85f9cadab9d0a421bec212511aca8b6a44697bf69caefdb1f0c25f3d0d18"} Jan 22 00:23:17 crc kubenswrapper[4829]: I0122 00:23:17.187002 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pbnmv" podStartSLOduration=3.5407377110000002 podStartE2EDuration="6.186982732s" podCreationTimestamp="2026-01-22 00:23:11 +0000 UTC" firstStartedPulling="2026-01-22 00:23:14.135816212 +0000 UTC m=+972.172058124" lastFinishedPulling="2026-01-22 00:23:16.782061233 +0000 UTC m=+974.818303145" observedRunningTime="2026-01-22 00:23:17.184151041 +0000 UTC m=+975.220392973" watchObservedRunningTime="2026-01-22 00:23:17.186982732 +0000 UTC m=+975.223224644" Jan 22 00:23:22 crc kubenswrapper[4829]: I0122 00:23:22.272468 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-operators-pbnmv" Jan 22 00:23:22 crc kubenswrapper[4829]: I0122 00:23:22.272918 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pbnmv" Jan 22 00:23:23 crc kubenswrapper[4829]: I0122 00:23:23.315046 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-pbnmv" podUID="9b71e63a-fd51-47c2-aaee-d078155e3569" containerName="registry-server" probeResult="failure" output=< Jan 22 00:23:23 crc kubenswrapper[4829]: timeout: failed to connect service ":50051" within 1s Jan 22 00:23:23 crc kubenswrapper[4829]: > Jan 22 00:23:32 crc kubenswrapper[4829]: I0122 00:23:32.349799 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pbnmv" Jan 22 00:23:32 crc kubenswrapper[4829]: I0122 00:23:32.395929 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pbnmv" Jan 22 00:23:32 crc kubenswrapper[4829]: I0122 00:23:32.591813 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pbnmv"] Jan 22 00:23:34 crc kubenswrapper[4829]: I0122 00:23:34.285940 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pbnmv" podUID="9b71e63a-fd51-47c2-aaee-d078155e3569" containerName="registry-server" containerID="cri-o://c34c85f9cadab9d0a421bec212511aca8b6a44697bf69caefdb1f0c25f3d0d18" gracePeriod=2 Jan 22 00:23:34 crc kubenswrapper[4829]: I0122 00:23:34.672200 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pbnmv" Jan 22 00:23:34 crc kubenswrapper[4829]: I0122 00:23:34.813697 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b71e63a-fd51-47c2-aaee-d078155e3569-catalog-content\") pod \"9b71e63a-fd51-47c2-aaee-d078155e3569\" (UID: \"9b71e63a-fd51-47c2-aaee-d078155e3569\") " Jan 22 00:23:34 crc kubenswrapper[4829]: I0122 00:23:34.813798 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b71e63a-fd51-47c2-aaee-d078155e3569-utilities\") pod \"9b71e63a-fd51-47c2-aaee-d078155e3569\" (UID: \"9b71e63a-fd51-47c2-aaee-d078155e3569\") " Jan 22 00:23:34 crc kubenswrapper[4829]: I0122 00:23:34.813973 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s2rbw\" (UniqueName: \"kubernetes.io/projected/9b71e63a-fd51-47c2-aaee-d078155e3569-kube-api-access-s2rbw\") pod \"9b71e63a-fd51-47c2-aaee-d078155e3569\" (UID: \"9b71e63a-fd51-47c2-aaee-d078155e3569\") " Jan 22 00:23:34 crc kubenswrapper[4829]: I0122 00:23:34.815782 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b71e63a-fd51-47c2-aaee-d078155e3569-utilities" (OuterVolumeSpecName: "utilities") pod "9b71e63a-fd51-47c2-aaee-d078155e3569" (UID: "9b71e63a-fd51-47c2-aaee-d078155e3569"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:23:34 crc kubenswrapper[4829]: I0122 00:23:34.823294 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b71e63a-fd51-47c2-aaee-d078155e3569-kube-api-access-s2rbw" (OuterVolumeSpecName: "kube-api-access-s2rbw") pod "9b71e63a-fd51-47c2-aaee-d078155e3569" (UID: "9b71e63a-fd51-47c2-aaee-d078155e3569"). InnerVolumeSpecName "kube-api-access-s2rbw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:23:34 crc kubenswrapper[4829]: I0122 00:23:34.915700 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s2rbw\" (UniqueName: \"kubernetes.io/projected/9b71e63a-fd51-47c2-aaee-d078155e3569-kube-api-access-s2rbw\") on node \"crc\" DevicePath \"\"" Jan 22 00:23:34 crc kubenswrapper[4829]: I0122 00:23:34.915755 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b71e63a-fd51-47c2-aaee-d078155e3569-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 00:23:34 crc kubenswrapper[4829]: I0122 00:23:34.963575 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b71e63a-fd51-47c2-aaee-d078155e3569-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9b71e63a-fd51-47c2-aaee-d078155e3569" (UID: "9b71e63a-fd51-47c2-aaee-d078155e3569"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:23:35 crc kubenswrapper[4829]: I0122 00:23:35.016945 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b71e63a-fd51-47c2-aaee-d078155e3569-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 00:23:35 crc kubenswrapper[4829]: I0122 00:23:35.297422 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-pbnmv" Jan 22 00:23:35 crc kubenswrapper[4829]: I0122 00:23:35.297508 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pbnmv" event={"ID":"9b71e63a-fd51-47c2-aaee-d078155e3569","Type":"ContainerDied","Data":"c34c85f9cadab9d0a421bec212511aca8b6a44697bf69caefdb1f0c25f3d0d18"} Jan 22 00:23:35 crc kubenswrapper[4829]: I0122 00:23:35.297651 4829 scope.go:117] "RemoveContainer" containerID="c34c85f9cadab9d0a421bec212511aca8b6a44697bf69caefdb1f0c25f3d0d18" Jan 22 00:23:35 crc kubenswrapper[4829]: I0122 00:23:35.297904 4829 generic.go:334] "Generic (PLEG): container finished" podID="9b71e63a-fd51-47c2-aaee-d078155e3569" containerID="c34c85f9cadab9d0a421bec212511aca8b6a44697bf69caefdb1f0c25f3d0d18" exitCode=0 Jan 22 00:23:35 crc kubenswrapper[4829]: I0122 00:23:35.298088 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pbnmv" event={"ID":"9b71e63a-fd51-47c2-aaee-d078155e3569","Type":"ContainerDied","Data":"7a778b888c791520e8dc517253f9558d8959dc1e27b2db6d5024fc47fd1cf8a7"} Jan 22 00:23:35 crc kubenswrapper[4829]: I0122 00:23:35.319207 4829 scope.go:117] "RemoveContainer" containerID="0abb2b56235be92574dfece1ac2d5e3bac4a820a8712f2c7d03f0ffc6badd75b" Jan 22 00:23:35 crc kubenswrapper[4829]: I0122 00:23:35.340505 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pbnmv"] Jan 22 00:23:35 crc kubenswrapper[4829]: I0122 00:23:35.347569 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-pbnmv"] Jan 22 00:23:35 crc kubenswrapper[4829]: I0122 00:23:35.355181 4829 scope.go:117] "RemoveContainer" containerID="cd69d25abe01ca6816b8c0c6d4009231b6cec2f2af3c32f37aaec332b3015bd3" Jan 22 00:23:35 crc kubenswrapper[4829]: I0122 00:23:35.375248 4829 scope.go:117] "RemoveContainer" containerID="c34c85f9cadab9d0a421bec212511aca8b6a44697bf69caefdb1f0c25f3d0d18" Jan 22 00:23:35 crc kubenswrapper[4829]: E0122 00:23:35.375972 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c34c85f9cadab9d0a421bec212511aca8b6a44697bf69caefdb1f0c25f3d0d18\": container with ID starting with c34c85f9cadab9d0a421bec212511aca8b6a44697bf69caefdb1f0c25f3d0d18 not found: ID does not exist" containerID="c34c85f9cadab9d0a421bec212511aca8b6a44697bf69caefdb1f0c25f3d0d18" Jan 22 00:23:35 crc kubenswrapper[4829]: I0122 00:23:35.376018 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c34c85f9cadab9d0a421bec212511aca8b6a44697bf69caefdb1f0c25f3d0d18"} err="failed to get container status \"c34c85f9cadab9d0a421bec212511aca8b6a44697bf69caefdb1f0c25f3d0d18\": rpc error: code = NotFound desc = could not find container \"c34c85f9cadab9d0a421bec212511aca8b6a44697bf69caefdb1f0c25f3d0d18\": container with ID starting with c34c85f9cadab9d0a421bec212511aca8b6a44697bf69caefdb1f0c25f3d0d18 not found: ID does not exist" Jan 22 00:23:35 crc kubenswrapper[4829]: I0122 00:23:35.376048 4829 scope.go:117] "RemoveContainer" containerID="0abb2b56235be92574dfece1ac2d5e3bac4a820a8712f2c7d03f0ffc6badd75b" Jan 22 00:23:35 crc kubenswrapper[4829]: E0122 00:23:35.376691 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0abb2b56235be92574dfece1ac2d5e3bac4a820a8712f2c7d03f0ffc6badd75b\": container with ID starting with 
0abb2b56235be92574dfece1ac2d5e3bac4a820a8712f2c7d03f0ffc6badd75b not found: ID does not exist" containerID="0abb2b56235be92574dfece1ac2d5e3bac4a820a8712f2c7d03f0ffc6badd75b" Jan 22 00:23:35 crc kubenswrapper[4829]: I0122 00:23:35.376761 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0abb2b56235be92574dfece1ac2d5e3bac4a820a8712f2c7d03f0ffc6badd75b"} err="failed to get container status \"0abb2b56235be92574dfece1ac2d5e3bac4a820a8712f2c7d03f0ffc6badd75b\": rpc error: code = NotFound desc = could not find container \"0abb2b56235be92574dfece1ac2d5e3bac4a820a8712f2c7d03f0ffc6badd75b\": container with ID starting with 0abb2b56235be92574dfece1ac2d5e3bac4a820a8712f2c7d03f0ffc6badd75b not found: ID does not exist" Jan 22 00:23:35 crc kubenswrapper[4829]: I0122 00:23:35.376803 4829 scope.go:117] "RemoveContainer" containerID="cd69d25abe01ca6816b8c0c6d4009231b6cec2f2af3c32f37aaec332b3015bd3" Jan 22 00:23:35 crc kubenswrapper[4829]: E0122 00:23:35.377160 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd69d25abe01ca6816b8c0c6d4009231b6cec2f2af3c32f37aaec332b3015bd3\": container with ID starting with cd69d25abe01ca6816b8c0c6d4009231b6cec2f2af3c32f37aaec332b3015bd3 not found: ID does not exist" containerID="cd69d25abe01ca6816b8c0c6d4009231b6cec2f2af3c32f37aaec332b3015bd3" Jan 22 00:23:35 crc kubenswrapper[4829]: I0122 00:23:35.377187 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd69d25abe01ca6816b8c0c6d4009231b6cec2f2af3c32f37aaec332b3015bd3"} err="failed to get container status \"cd69d25abe01ca6816b8c0c6d4009231b6cec2f2af3c32f37aaec332b3015bd3\": rpc error: code = NotFound desc = could not find container \"cd69d25abe01ca6816b8c0c6d4009231b6cec2f2af3c32f37aaec332b3015bd3\": container with ID starting with cd69d25abe01ca6816b8c0c6d4009231b6cec2f2af3c32f37aaec332b3015bd3 not found: ID does not exist" Jan 22 00:23:36 crc kubenswrapper[4829]: I0122 00:23:36.565219 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b71e63a-fd51-47c2-aaee-d078155e3569" path="/var/lib/kubelet/pods/9b71e63a-fd51-47c2-aaee-d078155e3569/volumes" Jan 22 00:23:52 crc kubenswrapper[4829]: I0122 00:23:52.431349 4829 generic.go:334] "Generic (PLEG): container finished" podID="b8fd44cb-8191-4b46-8260-1bc8aa31fe6d" containerID="a5391031aecbd2e04332688be98c04846b4cc8ef10587dcb48f070b905366897" exitCode=0 Jan 22 00:23:52 crc kubenswrapper[4829]: I0122 00:23:52.431445 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" event={"ID":"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d","Type":"ContainerDied","Data":"a5391031aecbd2e04332688be98c04846b4cc8ef10587dcb48f070b905366897"} Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.780427 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.874185 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-buildworkdir\") pod \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.874317 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-node-pullsecrets\") pod \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.874378 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-build-ca-bundles\") pod \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.874427 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-build-system-configs\") pod \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.874414 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "b8fd44cb-8191-4b46-8260-1bc8aa31fe6d" (UID: "b8fd44cb-8191-4b46-8260-1bc8aa31fe6d"). InnerVolumeSpecName "node-pullsecrets". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.874503 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-builder-dockercfg-ll9bh-pull\") pod \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.874623 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-build-blob-cache\") pod \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.874668 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-buildcachedir\") pod \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.874696 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-build-proxy-ca-bundles\") pod \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.874728 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-container-storage-run\") pod \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.874754 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2hl6w\" (UniqueName: \"kubernetes.io/projected/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-kube-api-access-2hl6w\") pod \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.874783 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-container-storage-root\") pod \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.874814 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-builder-dockercfg-ll9bh-push\") pod \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.874895 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "b8fd44cb-8191-4b46-8260-1bc8aa31fe6d" (UID: "b8fd44cb-8191-4b46-8260-1bc8aa31fe6d"). InnerVolumeSpecName "buildcachedir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.875229 4829 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.875249 4829 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-buildcachedir\") on node \"crc\" DevicePath \"\"" Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.875858 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "b8fd44cb-8191-4b46-8260-1bc8aa31fe6d" (UID: "b8fd44cb-8191-4b46-8260-1bc8aa31fe6d"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.875987 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "b8fd44cb-8191-4b46-8260-1bc8aa31fe6d" (UID: "b8fd44cb-8191-4b46-8260-1bc8aa31fe6d"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.876061 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "b8fd44cb-8191-4b46-8260-1bc8aa31fe6d" (UID: "b8fd44cb-8191-4b46-8260-1bc8aa31fe6d"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.877839 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "b8fd44cb-8191-4b46-8260-1bc8aa31fe6d" (UID: "b8fd44cb-8191-4b46-8260-1bc8aa31fe6d"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.879559 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "b8fd44cb-8191-4b46-8260-1bc8aa31fe6d" (UID: "b8fd44cb-8191-4b46-8260-1bc8aa31fe6d"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.881248 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-builder-dockercfg-ll9bh-pull" (OuterVolumeSpecName: "builder-dockercfg-ll9bh-pull") pod "b8fd44cb-8191-4b46-8260-1bc8aa31fe6d" (UID: "b8fd44cb-8191-4b46-8260-1bc8aa31fe6d"). InnerVolumeSpecName "builder-dockercfg-ll9bh-pull". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.889081 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-kube-api-access-2hl6w" (OuterVolumeSpecName: "kube-api-access-2hl6w") pod "b8fd44cb-8191-4b46-8260-1bc8aa31fe6d" (UID: "b8fd44cb-8191-4b46-8260-1bc8aa31fe6d"). InnerVolumeSpecName "kube-api-access-2hl6w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.892434 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-builder-dockercfg-ll9bh-push" (OuterVolumeSpecName: "builder-dockercfg-ll9bh-push") pod "b8fd44cb-8191-4b46-8260-1bc8aa31fe6d" (UID: "b8fd44cb-8191-4b46-8260-1bc8aa31fe6d"). InnerVolumeSpecName "builder-dockercfg-ll9bh-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.976143 4829 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.976187 4829 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-build-system-configs\") on node \"crc\" DevicePath \"\"" Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.976200 4829 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-builder-dockercfg-ll9bh-pull\") on node \"crc\" DevicePath \"\"" Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.976211 4829 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.976223 4829 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-container-storage-run\") on node \"crc\" DevicePath \"\"" Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.976233 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2hl6w\" (UniqueName: \"kubernetes.io/projected/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-kube-api-access-2hl6w\") on node \"crc\" DevicePath \"\"" Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.976244 4829 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-builder-dockercfg-ll9bh-push\") on node \"crc\" DevicePath \"\"" Jan 22 00:23:53 crc kubenswrapper[4829]: I0122 00:23:53.976255 4829 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-buildworkdir\") on node \"crc\" DevicePath \"\"" Jan 22 00:23:54 crc kubenswrapper[4829]: I0122 00:23:54.096868 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "b8fd44cb-8191-4b46-8260-1bc8aa31fe6d" (UID: 
"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:23:54 crc kubenswrapper[4829]: I0122 00:23:54.177924 4829 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-build-blob-cache\") on node \"crc\" DevicePath \"\"" Jan 22 00:23:54 crc kubenswrapper[4829]: I0122 00:23:54.453432 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-2-build" event={"ID":"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d","Type":"ContainerDied","Data":"457c7b7e07ab74a78384b81b1eb10e86b0d941411873d2d1712dc4ca57594922"} Jan 22 00:23:54 crc kubenswrapper[4829]: I0122 00:23:54.453496 4829 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="457c7b7e07ab74a78384b81b1eb10e86b0d941411873d2d1712dc4ca57594922" Jan 22 00:23:54 crc kubenswrapper[4829]: I0122 00:23:54.453768 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-2-build" Jan 22 00:23:56 crc kubenswrapper[4829]: I0122 00:23:56.412636 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "b8fd44cb-8191-4b46-8260-1bc8aa31fe6d" (UID: "b8fd44cb-8191-4b46-8260-1bc8aa31fe6d"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:23:56 crc kubenswrapper[4829]: I0122 00:23:56.413721 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-container-storage-root\") pod \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\" (UID: \"b8fd44cb-8191-4b46-8260-1bc8aa31fe6d\") " Jan 22 00:23:56 crc kubenswrapper[4829]: W0122 00:23:56.413878 4829 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d/volumes/kubernetes.io~empty-dir/container-storage-root Jan 22 00:23:56 crc kubenswrapper[4829]: I0122 00:23:56.413900 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "b8fd44cb-8191-4b46-8260-1bc8aa31fe6d" (UID: "b8fd44cb-8191-4b46-8260-1bc8aa31fe6d"). InnerVolumeSpecName "container-storage-root". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:23:56 crc kubenswrapper[4829]: I0122 00:23:56.414250 4829 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/b8fd44cb-8191-4b46-8260-1bc8aa31fe6d-container-storage-root\") on node \"crc\" DevicePath \"\"" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.348719 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/sg-core-1-build"] Jan 22 00:23:58 crc kubenswrapper[4829]: E0122 00:23:58.349349 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b71e63a-fd51-47c2-aaee-d078155e3569" containerName="extract-utilities" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.349363 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b71e63a-fd51-47c2-aaee-d078155e3569" containerName="extract-utilities" Jan 22 00:23:58 crc kubenswrapper[4829]: E0122 00:23:58.349377 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b71e63a-fd51-47c2-aaee-d078155e3569" containerName="extract-content" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.349384 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b71e63a-fd51-47c2-aaee-d078155e3569" containerName="extract-content" Jan 22 00:23:58 crc kubenswrapper[4829]: E0122 00:23:58.349399 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8fd44cb-8191-4b46-8260-1bc8aa31fe6d" containerName="manage-dockerfile" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.349408 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8fd44cb-8191-4b46-8260-1bc8aa31fe6d" containerName="manage-dockerfile" Jan 22 00:23:58 crc kubenswrapper[4829]: E0122 00:23:58.349422 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b71e63a-fd51-47c2-aaee-d078155e3569" containerName="registry-server" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.349429 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b71e63a-fd51-47c2-aaee-d078155e3569" containerName="registry-server" Jan 22 00:23:58 crc kubenswrapper[4829]: E0122 00:23:58.349438 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8fd44cb-8191-4b46-8260-1bc8aa31fe6d" containerName="docker-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.349465 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8fd44cb-8191-4b46-8260-1bc8aa31fe6d" containerName="docker-build" Jan 22 00:23:58 crc kubenswrapper[4829]: E0122 00:23:58.349476 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8fd44cb-8191-4b46-8260-1bc8aa31fe6d" containerName="git-clone" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.349484 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8fd44cb-8191-4b46-8260-1bc8aa31fe6d" containerName="git-clone" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.349650 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b71e63a-fd51-47c2-aaee-d078155e3569" containerName="registry-server" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.349668 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8fd44cb-8191-4b46-8260-1bc8aa31fe6d" containerName="docker-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.350418 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.352230 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-core-1-ca" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.352758 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-core-1-sys-config" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.353133 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-core-1-global-ca" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.353320 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-ll9bh" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.370851 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-core-1-build"] Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.441772 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-buildcachedir\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.442175 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-build-ca-bundles\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.442387 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-builder-dockercfg-ll9bh-push\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.442610 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-build-system-configs\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.442820 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-container-storage-root\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.443009 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-node-pullsecrets\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.443204 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: 
\"kubernetes.io/empty-dir/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-buildworkdir\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.443380 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-build-proxy-ca-bundles\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.443585 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-builder-dockercfg-ll9bh-pull\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.544722 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-buildcachedir\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.544812 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-build-ca-bundles\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.544890 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-builder-dockercfg-ll9bh-push\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.544934 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-build-system-configs\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.544981 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-container-storage-root\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.545038 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktnk8\" (UniqueName: \"kubernetes.io/projected/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-kube-api-access-ktnk8\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.544925 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: 
\"kubernetes.io/host-path/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-buildcachedir\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.545283 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-node-pullsecrets\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.545409 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-buildworkdir\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.545498 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-node-pullsecrets\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.546181 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-container-storage-root\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.545882 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-container-storage-run\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.546311 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-build-proxy-ca-bundles\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.546337 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-buildworkdir\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.546461 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-build-system-configs\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.546751 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-build-ca-bundles\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " 
pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.547023 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-build-proxy-ca-bundles\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.547096 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-builder-dockercfg-ll9bh-pull\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.547134 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-build-blob-cache\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.647831 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-container-storage-run\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.647982 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-build-blob-cache\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:58 crc kubenswrapper[4829]: I0122 00:23:58.648283 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktnk8\" (UniqueName: \"kubernetes.io/projected/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-kube-api-access-ktnk8\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:59 crc kubenswrapper[4829]: I0122 00:23:59.035232 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-builder-dockercfg-ll9bh-push\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:59 crc kubenswrapper[4829]: I0122 00:23:59.035450 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-builder-dockercfg-ll9bh-pull\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:59 crc kubenswrapper[4829]: I0122 00:23:59.035740 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-build-blob-cache\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:59 crc kubenswrapper[4829]: I0122 
00:23:59.035938 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-container-storage-run\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:59 crc kubenswrapper[4829]: I0122 00:23:59.039135 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktnk8\" (UniqueName: \"kubernetes.io/projected/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-kube-api-access-ktnk8\") pod \"sg-core-1-build\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " pod="service-telemetry/sg-core-1-build" Jan 22 00:23:59 crc kubenswrapper[4829]: I0122 00:23:59.266783 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-core-1-build" Jan 22 00:23:59 crc kubenswrapper[4829]: I0122 00:23:59.535512 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-core-1-build"] Jan 22 00:23:59 crc kubenswrapper[4829]: W0122 00:23:59.546688 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7da6c1e2_f6f7_40ce_a073_cd1834d4d037.slice/crio-26c9ace1b652836d83de23014b4a5fdd8d91bcb3e71c95fc2126014b984bf57c WatchSource:0}: Error finding container 26c9ace1b652836d83de23014b4a5fdd8d91bcb3e71c95fc2126014b984bf57c: Status 404 returned error can't find the container with id 26c9ace1b652836d83de23014b4a5fdd8d91bcb3e71c95fc2126014b984bf57c Jan 22 00:24:00 crc kubenswrapper[4829]: I0122 00:24:00.503850 4829 generic.go:334] "Generic (PLEG): container finished" podID="7da6c1e2-f6f7-40ce-a073-cd1834d4d037" containerID="8d537f3c7e01957132ffcf1478afa4e7dbf2b2cecd1388a594deee8b6fb9b179" exitCode=0 Jan 22 00:24:00 crc kubenswrapper[4829]: I0122 00:24:00.504102 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-1-build" event={"ID":"7da6c1e2-f6f7-40ce-a073-cd1834d4d037","Type":"ContainerDied","Data":"8d537f3c7e01957132ffcf1478afa4e7dbf2b2cecd1388a594deee8b6fb9b179"} Jan 22 00:24:00 crc kubenswrapper[4829]: I0122 00:24:00.504193 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-1-build" event={"ID":"7da6c1e2-f6f7-40ce-a073-cd1834d4d037","Type":"ContainerStarted","Data":"26c9ace1b652836d83de23014b4a5fdd8d91bcb3e71c95fc2126014b984bf57c"} Jan 22 00:24:01 crc kubenswrapper[4829]: I0122 00:24:01.529072 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-1-build" event={"ID":"7da6c1e2-f6f7-40ce-a073-cd1834d4d037","Type":"ContainerStarted","Data":"e5346af99bce7bbcad58fc635aee35ec8d5751e3b829ee99cd66755ce218c66b"} Jan 22 00:24:01 crc kubenswrapper[4829]: I0122 00:24:01.570980 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/sg-core-1-build" podStartSLOduration=3.5709558169999998 podStartE2EDuration="3.570955817s" podCreationTimestamp="2026-01-22 00:23:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:24:01.56506849 +0000 UTC m=+1019.601310442" watchObservedRunningTime="2026-01-22 00:24:01.570955817 +0000 UTC m=+1019.607197739" Jan 22 00:24:08 crc kubenswrapper[4829]: I0122 00:24:08.728333 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/sg-core-1-build"] Jan 22 00:24:08 crc kubenswrapper[4829]: I0122 
00:24:08.729171 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/sg-core-1-build" podUID="7da6c1e2-f6f7-40ce-a073-cd1834d4d037" containerName="docker-build" containerID="cri-o://e5346af99bce7bbcad58fc635aee35ec8d5751e3b829ee99cd66755ce218c66b" gracePeriod=30 Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.098747 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-core-1-build_7da6c1e2-f6f7-40ce-a073-cd1834d4d037/docker-build/0.log" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.099356 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-core-1-build" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.294519 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-build-ca-bundles\") pod \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.294582 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-builder-dockercfg-ll9bh-push\") pod \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.294601 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-build-proxy-ca-bundles\") pod \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.294625 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-node-pullsecrets\") pod \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.294655 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-container-storage-root\") pod \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.294703 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-buildworkdir\") pod \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.294760 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-builder-dockercfg-ll9bh-pull\") pod \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.294777 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-build-blob-cache\") pod 
\"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.294796 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-container-storage-run\") pod \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.294822 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktnk8\" (UniqueName: \"kubernetes.io/projected/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-kube-api-access-ktnk8\") pod \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.294807 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "7da6c1e2-f6f7-40ce-a073-cd1834d4d037" (UID: "7da6c1e2-f6f7-40ce-a073-cd1834d4d037"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.294839 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-buildcachedir\") pod \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.294919 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-build-system-configs\") pod \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\" (UID: \"7da6c1e2-f6f7-40ce-a073-cd1834d4d037\") " Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.295203 4829 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.295498 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "7da6c1e2-f6f7-40ce-a073-cd1834d4d037" (UID: "7da6c1e2-f6f7-40ce-a073-cd1834d4d037"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.295772 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "7da6c1e2-f6f7-40ce-a073-cd1834d4d037" (UID: "7da6c1e2-f6f7-40ce-a073-cd1834d4d037"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.295846 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "7da6c1e2-f6f7-40ce-a073-cd1834d4d037" (UID: "7da6c1e2-f6f7-40ce-a073-cd1834d4d037"). 
InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.295903 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "7da6c1e2-f6f7-40ce-a073-cd1834d4d037" (UID: "7da6c1e2-f6f7-40ce-a073-cd1834d4d037"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.296181 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "7da6c1e2-f6f7-40ce-a073-cd1834d4d037" (UID: "7da6c1e2-f6f7-40ce-a073-cd1834d4d037"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.297083 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "7da6c1e2-f6f7-40ce-a073-cd1834d4d037" (UID: "7da6c1e2-f6f7-40ce-a073-cd1834d4d037"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.301003 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-builder-dockercfg-ll9bh-push" (OuterVolumeSpecName: "builder-dockercfg-ll9bh-push") pod "7da6c1e2-f6f7-40ce-a073-cd1834d4d037" (UID: "7da6c1e2-f6f7-40ce-a073-cd1834d4d037"). InnerVolumeSpecName "builder-dockercfg-ll9bh-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.302156 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-builder-dockercfg-ll9bh-pull" (OuterVolumeSpecName: "builder-dockercfg-ll9bh-pull") pod "7da6c1e2-f6f7-40ce-a073-cd1834d4d037" (UID: "7da6c1e2-f6f7-40ce-a073-cd1834d4d037"). InnerVolumeSpecName "builder-dockercfg-ll9bh-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.302360 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-kube-api-access-ktnk8" (OuterVolumeSpecName: "kube-api-access-ktnk8") pod "7da6c1e2-f6f7-40ce-a073-cd1834d4d037" (UID: "7da6c1e2-f6f7-40ce-a073-cd1834d4d037"). InnerVolumeSpecName "kube-api-access-ktnk8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.387670 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "7da6c1e2-f6f7-40ce-a073-cd1834d4d037" (UID: "7da6c1e2-f6f7-40ce-a073-cd1834d4d037"). InnerVolumeSpecName "build-blob-cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.396381 4829 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.396408 4829 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.396418 4829 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-builder-dockercfg-ll9bh-push\") on node \"crc\" DevicePath \"\"" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.396428 4829 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-buildworkdir\") on node \"crc\" DevicePath \"\"" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.396440 4829 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-builder-dockercfg-ll9bh-pull\") on node \"crc\" DevicePath \"\"" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.396449 4829 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-build-blob-cache\") on node \"crc\" DevicePath \"\"" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.396459 4829 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-container-storage-run\") on node \"crc\" DevicePath \"\"" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.396467 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktnk8\" (UniqueName: \"kubernetes.io/projected/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-kube-api-access-ktnk8\") on node \"crc\" DevicePath \"\"" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.396474 4829 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-buildcachedir\") on node \"crc\" DevicePath \"\"" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.396484 4829 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-build-system-configs\") on node \"crc\" DevicePath \"\"" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.439060 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "7da6c1e2-f6f7-40ce-a073-cd1834d4d037" (UID: "7da6c1e2-f6f7-40ce-a073-cd1834d4d037"). InnerVolumeSpecName "container-storage-root". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.497534 4829 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/7da6c1e2-f6f7-40ce-a073-cd1834d4d037-container-storage-root\") on node \"crc\" DevicePath \"\"" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.586791 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-core-1-build_7da6c1e2-f6f7-40ce-a073-cd1834d4d037/docker-build/0.log" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.587304 4829 generic.go:334] "Generic (PLEG): container finished" podID="7da6c1e2-f6f7-40ce-a073-cd1834d4d037" containerID="e5346af99bce7bbcad58fc635aee35ec8d5751e3b829ee99cd66755ce218c66b" exitCode=1 Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.587350 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-1-build" event={"ID":"7da6c1e2-f6f7-40ce-a073-cd1834d4d037","Type":"ContainerDied","Data":"e5346af99bce7bbcad58fc635aee35ec8d5751e3b829ee99cd66755ce218c66b"} Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.587380 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-core-1-build" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.587387 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-1-build" event={"ID":"7da6c1e2-f6f7-40ce-a073-cd1834d4d037","Type":"ContainerDied","Data":"26c9ace1b652836d83de23014b4a5fdd8d91bcb3e71c95fc2126014b984bf57c"} Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.587416 4829 scope.go:117] "RemoveContainer" containerID="e5346af99bce7bbcad58fc635aee35ec8d5751e3b829ee99cd66755ce218c66b" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.628295 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/sg-core-1-build"] Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.635913 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/sg-core-1-build"] Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.639412 4829 scope.go:117] "RemoveContainer" containerID="8d537f3c7e01957132ffcf1478afa4e7dbf2b2cecd1388a594deee8b6fb9b179" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.671554 4829 scope.go:117] "RemoveContainer" containerID="e5346af99bce7bbcad58fc635aee35ec8d5751e3b829ee99cd66755ce218c66b" Jan 22 00:24:09 crc kubenswrapper[4829]: E0122 00:24:09.672032 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5346af99bce7bbcad58fc635aee35ec8d5751e3b829ee99cd66755ce218c66b\": container with ID starting with e5346af99bce7bbcad58fc635aee35ec8d5751e3b829ee99cd66755ce218c66b not found: ID does not exist" containerID="e5346af99bce7bbcad58fc635aee35ec8d5751e3b829ee99cd66755ce218c66b" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.672153 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5346af99bce7bbcad58fc635aee35ec8d5751e3b829ee99cd66755ce218c66b"} err="failed to get container status \"e5346af99bce7bbcad58fc635aee35ec8d5751e3b829ee99cd66755ce218c66b\": rpc error: code = NotFound desc = could not find container \"e5346af99bce7bbcad58fc635aee35ec8d5751e3b829ee99cd66755ce218c66b\": container with ID starting with e5346af99bce7bbcad58fc635aee35ec8d5751e3b829ee99cd66755ce218c66b not found: ID does not exist" Jan 22 00:24:09 crc 
kubenswrapper[4829]: I0122 00:24:09.672246 4829 scope.go:117] "RemoveContainer" containerID="8d537f3c7e01957132ffcf1478afa4e7dbf2b2cecd1388a594deee8b6fb9b179" Jan 22 00:24:09 crc kubenswrapper[4829]: E0122 00:24:09.672654 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d537f3c7e01957132ffcf1478afa4e7dbf2b2cecd1388a594deee8b6fb9b179\": container with ID starting with 8d537f3c7e01957132ffcf1478afa4e7dbf2b2cecd1388a594deee8b6fb9b179 not found: ID does not exist" containerID="8d537f3c7e01957132ffcf1478afa4e7dbf2b2cecd1388a594deee8b6fb9b179" Jan 22 00:24:09 crc kubenswrapper[4829]: I0122 00:24:09.672685 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d537f3c7e01957132ffcf1478afa4e7dbf2b2cecd1388a594deee8b6fb9b179"} err="failed to get container status \"8d537f3c7e01957132ffcf1478afa4e7dbf2b2cecd1388a594deee8b6fb9b179\": rpc error: code = NotFound desc = could not find container \"8d537f3c7e01957132ffcf1478afa4e7dbf2b2cecd1388a594deee8b6fb9b179\": container with ID starting with 8d537f3c7e01957132ffcf1478afa4e7dbf2b2cecd1388a594deee8b6fb9b179 not found: ID does not exist" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.307284 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/sg-core-2-build"] Jan 22 00:24:10 crc kubenswrapper[4829]: E0122 00:24:10.308003 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7da6c1e2-f6f7-40ce-a073-cd1834d4d037" containerName="docker-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.308024 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="7da6c1e2-f6f7-40ce-a073-cd1834d4d037" containerName="docker-build" Jan 22 00:24:10 crc kubenswrapper[4829]: E0122 00:24:10.308042 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7da6c1e2-f6f7-40ce-a073-cd1834d4d037" containerName="manage-dockerfile" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.308054 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="7da6c1e2-f6f7-40ce-a073-cd1834d4d037" containerName="manage-dockerfile" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.308203 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="7da6c1e2-f6f7-40ce-a073-cd1834d4d037" containerName="docker-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.309467 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.311474 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-core-2-global-ca" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.312056 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-ll9bh" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.312446 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-core-2-ca" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.313127 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-core-2-sys-config" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.330266 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-core-2-build"] Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.510497 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-builder-dockercfg-ll9bh-push\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.510714 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-container-storage-root\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.510761 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-buildcachedir\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.510819 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-build-system-configs\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.510861 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-buildworkdir\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.510960 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-container-storage-run\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.511037 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tncg\" (UniqueName: 
\"kubernetes.io/projected/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-kube-api-access-8tncg\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.511102 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-build-ca-bundles\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.511143 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-build-blob-cache\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.511218 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-node-pullsecrets\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.511380 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-builder-dockercfg-ll9bh-pull\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.511464 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-build-proxy-ca-bundles\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.568150 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7da6c1e2-f6f7-40ce-a073-cd1834d4d037" path="/var/lib/kubelet/pods/7da6c1e2-f6f7-40ce-a073-cd1834d4d037/volumes" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.612279 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-builder-dockercfg-ll9bh-pull\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.612341 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-build-proxy-ca-bundles\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.612367 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-builder-dockercfg-ll9bh-push\") pod \"sg-core-2-build\" (UID: 
\"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.612401 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-container-storage-root\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.612419 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-buildcachedir\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.612436 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-build-system-configs\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.612452 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-buildworkdir\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.612470 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-container-storage-run\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.612485 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tncg\" (UniqueName: \"kubernetes.io/projected/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-kube-api-access-8tncg\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.612505 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-build-ca-bundles\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.612522 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-build-blob-cache\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.612559 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-node-pullsecrets\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.612602 4829 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-buildcachedir\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.612708 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-node-pullsecrets\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.613142 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-build-blob-cache\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.613294 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-buildworkdir\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.613330 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-container-storage-run\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.613519 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-build-proxy-ca-bundles\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.613652 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-build-system-configs\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.613672 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-container-storage-root\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.613927 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-build-ca-bundles\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.627125 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-builder-dockercfg-ll9bh-push\") pod 
\"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.627944 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-builder-dockercfg-ll9bh-pull\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.656207 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tncg\" (UniqueName: \"kubernetes.io/projected/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-kube-api-access-8tncg\") pod \"sg-core-2-build\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " pod="service-telemetry/sg-core-2-build" Jan 22 00:24:10 crc kubenswrapper[4829]: I0122 00:24:10.924527 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-core-2-build" Jan 22 00:24:11 crc kubenswrapper[4829]: I0122 00:24:11.178588 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-core-2-build"] Jan 22 00:24:11 crc kubenswrapper[4829]: I0122 00:24:11.605158 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" event={"ID":"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1","Type":"ContainerStarted","Data":"351cb757450fc358075ae60671a4972cb97cfa0d816c3ae7ed3d107d6ffc7452"} Jan 22 00:24:11 crc kubenswrapper[4829]: I0122 00:24:11.605224 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" event={"ID":"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1","Type":"ContainerStarted","Data":"a4ee7b31a857a7bb19e592152e0513a645bcf39ab36ddee677cf009992f36060"} Jan 22 00:24:12 crc kubenswrapper[4829]: I0122 00:24:12.614096 4829 generic.go:334] "Generic (PLEG): container finished" podID="1d1c4c78-5e7d-4ed5-b394-73698d1d02d1" containerID="351cb757450fc358075ae60671a4972cb97cfa0d816c3ae7ed3d107d6ffc7452" exitCode=0 Jan 22 00:24:12 crc kubenswrapper[4829]: I0122 00:24:12.614157 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" event={"ID":"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1","Type":"ContainerDied","Data":"351cb757450fc358075ae60671a4972cb97cfa0d816c3ae7ed3d107d6ffc7452"} Jan 22 00:24:13 crc kubenswrapper[4829]: I0122 00:24:13.631082 4829 generic.go:334] "Generic (PLEG): container finished" podID="1d1c4c78-5e7d-4ed5-b394-73698d1d02d1" containerID="656902daf38b245c3c8761d1d9cc595821fdd6ad915263909417217e59e1a9a3" exitCode=0 Jan 22 00:24:13 crc kubenswrapper[4829]: I0122 00:24:13.631210 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" event={"ID":"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1","Type":"ContainerDied","Data":"656902daf38b245c3c8761d1d9cc595821fdd6ad915263909417217e59e1a9a3"} Jan 22 00:24:13 crc kubenswrapper[4829]: I0122 00:24:13.666423 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-core-2-build_1d1c4c78-5e7d-4ed5-b394-73698d1d02d1/manage-dockerfile/0.log" Jan 22 00:24:14 crc kubenswrapper[4829]: I0122 00:24:14.643418 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" event={"ID":"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1","Type":"ContainerStarted","Data":"07e8ac511836d01881994ad24c2fa4f74d1c2347950fd516f3ed3741761efcb7"} Jan 22 00:24:15 crc kubenswrapper[4829]: I0122 00:24:15.681452 
4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/sg-core-2-build" podStartSLOduration=5.6814321759999995 podStartE2EDuration="5.681432176s" podCreationTimestamp="2026-01-22 00:24:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:24:15.681088715 +0000 UTC m=+1033.717330627" watchObservedRunningTime="2026-01-22 00:24:15.681432176 +0000 UTC m=+1033.717674088"
Jan 22 00:25:34 crc kubenswrapper[4829]: I0122 00:25:34.658347 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 00:25:34 crc kubenswrapper[4829]: I0122 00:25:34.659399 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 00:26:04 crc kubenswrapper[4829]: I0122 00:26:04.658366 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 00:26:04 crc kubenswrapper[4829]: I0122 00:26:04.659163 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 00:26:34 crc kubenswrapper[4829]: I0122 00:26:34.658741 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 00:26:34 crc kubenswrapper[4829]: I0122 00:26:34.659386 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 00:26:34 crc kubenswrapper[4829]: I0122 00:26:34.659450 4829 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr"
Jan 22 00:26:34 crc kubenswrapper[4829]: I0122 00:26:34.660361 4829 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0483a801a05aaf6e880f016f24ef4b653fbb05a78257dcb329328e91fdd090fc"} pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 22 00:26:34 crc kubenswrapper[4829]: I0122 00:26:34.660467 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" containerID="cri-o://0483a801a05aaf6e880f016f24ef4b653fbb05a78257dcb329328e91fdd090fc" gracePeriod=600
Jan 22 00:26:35 crc kubenswrapper[4829]: I0122 00:26:35.606406 4829 generic.go:334] "Generic (PLEG): container finished" podID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerID="0483a801a05aaf6e880f016f24ef4b653fbb05a78257dcb329328e91fdd090fc" exitCode=0
Jan 22 00:26:35 crc kubenswrapper[4829]: I0122 00:26:35.606482 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerDied","Data":"0483a801a05aaf6e880f016f24ef4b653fbb05a78257dcb329328e91fdd090fc"}
Jan 22 00:26:35 crc kubenswrapper[4829]: I0122 00:26:35.606585 4829 scope.go:117] "RemoveContainer" containerID="3fcfdecd12f2ea6bd608be28228c0387268ca752f7a44968b5bed6691aacbbf6"
Jan 22 00:26:36 crc kubenswrapper[4829]: I0122 00:26:36.617987 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerStarted","Data":"dcae494e6dd96d020976f8bde0ed9bba1101db5e0723ced80c6af5f0103c9228"}
Jan 22 00:27:35 crc kubenswrapper[4829]: I0122 00:27:35.108366 4829 generic.go:334] "Generic (PLEG): container finished" podID="1d1c4c78-5e7d-4ed5-b394-73698d1d02d1" containerID="07e8ac511836d01881994ad24c2fa4f74d1c2347950fd516f3ed3741761efcb7" exitCode=0
Jan 22 00:27:35 crc kubenswrapper[4829]: I0122 00:27:35.108493 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" event={"ID":"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1","Type":"ContainerDied","Data":"07e8ac511836d01881994ad24c2fa4f74d1c2347950fd516f3ed3741761efcb7"}
Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.381184 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/sg-core-2-build" Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.427658 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-build-blob-cache\") pod \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.427714 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-builder-dockercfg-ll9bh-push\") pod \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.427776 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-buildworkdir\") pod \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.427795 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-container-storage-root\") pod \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.427847 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-node-pullsecrets\") pod \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.427878 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-build-ca-bundles\") pod \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.427907 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-builder-dockercfg-ll9bh-pull\") pod \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.427948 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-container-storage-run\") pod \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.427977 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-build-proxy-ca-bundles\") pod \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.428001 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tncg\" (UniqueName: 
\"kubernetes.io/projected/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-kube-api-access-8tncg\") pod \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.428036 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-build-system-configs\") pod \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.428067 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-buildcachedir\") pod \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\" (UID: \"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1\") " Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.428309 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "1d1c4c78-5e7d-4ed5-b394-73698d1d02d1" (UID: "1d1c4c78-5e7d-4ed5-b394-73698d1d02d1"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.429326 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "1d1c4c78-5e7d-4ed5-b394-73698d1d02d1" (UID: "1d1c4c78-5e7d-4ed5-b394-73698d1d02d1"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.430728 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "1d1c4c78-5e7d-4ed5-b394-73698d1d02d1" (UID: "1d1c4c78-5e7d-4ed5-b394-73698d1d02d1"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.430945 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "1d1c4c78-5e7d-4ed5-b394-73698d1d02d1" (UID: "1d1c4c78-5e7d-4ed5-b394-73698d1d02d1"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.431201 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "1d1c4c78-5e7d-4ed5-b394-73698d1d02d1" (UID: "1d1c4c78-5e7d-4ed5-b394-73698d1d02d1"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.437697 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-kube-api-access-8tncg" (OuterVolumeSpecName: "kube-api-access-8tncg") pod "1d1c4c78-5e7d-4ed5-b394-73698d1d02d1" (UID: "1d1c4c78-5e7d-4ed5-b394-73698d1d02d1"). 
InnerVolumeSpecName "kube-api-access-8tncg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.438813 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-builder-dockercfg-ll9bh-pull" (OuterVolumeSpecName: "builder-dockercfg-ll9bh-pull") pod "1d1c4c78-5e7d-4ed5-b394-73698d1d02d1" (UID: "1d1c4c78-5e7d-4ed5-b394-73698d1d02d1"). InnerVolumeSpecName "builder-dockercfg-ll9bh-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.440945 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-builder-dockercfg-ll9bh-push" (OuterVolumeSpecName: "builder-dockercfg-ll9bh-push") pod "1d1c4c78-5e7d-4ed5-b394-73698d1d02d1" (UID: "1d1c4c78-5e7d-4ed5-b394-73698d1d02d1"). InnerVolumeSpecName "builder-dockercfg-ll9bh-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.441864 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "1d1c4c78-5e7d-4ed5-b394-73698d1d02d1" (UID: "1d1c4c78-5e7d-4ed5-b394-73698d1d02d1"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.452383 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "1d1c4c78-5e7d-4ed5-b394-73698d1d02d1" (UID: "1d1c4c78-5e7d-4ed5-b394-73698d1d02d1"). InnerVolumeSpecName "buildworkdir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.530269 4829 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-buildworkdir\") on node \"crc\" DevicePath \"\"" Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.530332 4829 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.530351 4829 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.530372 4829 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-builder-dockercfg-ll9bh-pull\") on node \"crc\" DevicePath \"\"" Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.530394 4829 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-container-storage-run\") on node \"crc\" DevicePath \"\"" Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.530411 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tncg\" (UniqueName: \"kubernetes.io/projected/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-kube-api-access-8tncg\") on node \"crc\" DevicePath \"\"" Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.530428 4829 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.530447 4829 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-build-system-configs\") on node \"crc\" DevicePath \"\"" Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.530464 4829 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-buildcachedir\") on node \"crc\" DevicePath \"\"" Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.530482 4829 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-builder-dockercfg-ll9bh-push\") on node \"crc\" DevicePath \"\"" Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.758252 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "1d1c4c78-5e7d-4ed5-b394-73698d1d02d1" (UID: "1d1c4c78-5e7d-4ed5-b394-73698d1d02d1"). InnerVolumeSpecName "build-blob-cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:27:36 crc kubenswrapper[4829]: I0122 00:27:36.835232 4829 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-build-blob-cache\") on node \"crc\" DevicePath \"\"" Jan 22 00:27:37 crc kubenswrapper[4829]: I0122 00:27:37.126499 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-core-2-build" event={"ID":"1d1c4c78-5e7d-4ed5-b394-73698d1d02d1","Type":"ContainerDied","Data":"a4ee7b31a857a7bb19e592152e0513a645bcf39ab36ddee677cf009992f36060"} Jan 22 00:27:37 crc kubenswrapper[4829]: I0122 00:27:37.126551 4829 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a4ee7b31a857a7bb19e592152e0513a645bcf39ab36ddee677cf009992f36060" Jan 22 00:27:37 crc kubenswrapper[4829]: I0122 00:27:37.126641 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-core-2-build" Jan 22 00:27:39 crc kubenswrapper[4829]: I0122 00:27:39.194398 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "1d1c4c78-5e7d-4ed5-b394-73698d1d02d1" (UID: "1d1c4c78-5e7d-4ed5-b394-73698d1d02d1"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:27:39 crc kubenswrapper[4829]: I0122 00:27:39.270576 4829 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/1d1c4c78-5e7d-4ed5-b394-73698d1d02d1-container-storage-root\") on node \"crc\" DevicePath \"\"" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.654862 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/sg-bridge-1-build"] Jan 22 00:27:41 crc kubenswrapper[4829]: E0122 00:27:41.655129 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d1c4c78-5e7d-4ed5-b394-73698d1d02d1" containerName="manage-dockerfile" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.655144 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d1c4c78-5e7d-4ed5-b394-73698d1d02d1" containerName="manage-dockerfile" Jan 22 00:27:41 crc kubenswrapper[4829]: E0122 00:27:41.655159 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d1c4c78-5e7d-4ed5-b394-73698d1d02d1" containerName="git-clone" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.655167 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d1c4c78-5e7d-4ed5-b394-73698d1d02d1" containerName="git-clone" Jan 22 00:27:41 crc kubenswrapper[4829]: E0122 00:27:41.655175 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d1c4c78-5e7d-4ed5-b394-73698d1d02d1" containerName="docker-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.655184 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d1c4c78-5e7d-4ed5-b394-73698d1d02d1" containerName="docker-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.655325 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d1c4c78-5e7d-4ed5-b394-73698d1d02d1" containerName="docker-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.656059 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.658181 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-bridge-1-sys-config" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.659063 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-bridge-1-global-ca" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.659232 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-bridge-1-ca" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.659326 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-ll9bh" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.675586 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-bridge-1-build"] Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.709301 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-build-system-configs\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.709348 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-container-storage-root\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.709377 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-build-ca-bundles\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.709402 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-container-storage-run\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.709436 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-buildworkdir\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.709463 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-builder-dockercfg-ll9bh-pull\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.709506 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: 
\"kubernetes.io/host-path/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-buildcachedir\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.709583 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-build-proxy-ca-bundles\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.709608 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-builder-dockercfg-ll9bh-push\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.709659 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-build-blob-cache\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.709699 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-node-pullsecrets\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.709723 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6pmx\" (UniqueName: \"kubernetes.io/projected/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-kube-api-access-k6pmx\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.810683 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6pmx\" (UniqueName: \"kubernetes.io/projected/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-kube-api-access-k6pmx\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.810743 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-build-system-configs\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.810772 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-container-storage-root\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.810806 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-build-ca-bundles\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.810830 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-container-storage-run\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.810868 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-buildworkdir\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.810901 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-builder-dockercfg-ll9bh-pull\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.810949 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-buildcachedir\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.811003 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-builder-dockercfg-ll9bh-push\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.811027 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-build-proxy-ca-bundles\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.811051 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-build-blob-cache\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.811076 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-node-pullsecrets\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.811261 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: 
\"kubernetes.io/host-path/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-node-pullsecrets\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.811338 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-container-storage-run\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.811469 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-buildworkdir\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.811602 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-buildcachedir\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.811750 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-build-system-configs\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.811782 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-build-blob-cache\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.812007 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-build-proxy-ca-bundles\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.812407 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-build-ca-bundles\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.813018 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-container-storage-root\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.817849 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-builder-dockercfg-ll9bh-push\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " 
pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.818235 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-builder-dockercfg-ll9bh-pull\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:41 crc kubenswrapper[4829]: I0122 00:27:41.829846 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6pmx\" (UniqueName: \"kubernetes.io/projected/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-kube-api-access-k6pmx\") pod \"sg-bridge-1-build\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:42 crc kubenswrapper[4829]: I0122 00:27:42.011765 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:42 crc kubenswrapper[4829]: I0122 00:27:42.250581 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-bridge-1-build"] Jan 22 00:27:43 crc kubenswrapper[4829]: I0122 00:27:43.172254 4829 generic.go:334] "Generic (PLEG): container finished" podID="e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b" containerID="f60432f5380ef13842f617d5cdba7cc65172f5204d94303264edaf5f401d2021" exitCode=0 Jan 22 00:27:43 crc kubenswrapper[4829]: I0122 00:27:43.172329 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-1-build" event={"ID":"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b","Type":"ContainerDied","Data":"f60432f5380ef13842f617d5cdba7cc65172f5204d94303264edaf5f401d2021"} Jan 22 00:27:43 crc kubenswrapper[4829]: I0122 00:27:43.172406 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-1-build" event={"ID":"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b","Type":"ContainerStarted","Data":"7ca3f85754a3e3adbc96fa3f5361b5fb20f45c430684194c4637a2731c6dd800"} Jan 22 00:27:44 crc kubenswrapper[4829]: I0122 00:27:44.178435 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-1-build" event={"ID":"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b","Type":"ContainerStarted","Data":"144da8b574be7312e6ab254834bd89eef06d2491ed183ac3ef46f95145e0091d"} Jan 22 00:27:44 crc kubenswrapper[4829]: I0122 00:27:44.198264 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/sg-bridge-1-build" podStartSLOduration=3.198243385 podStartE2EDuration="3.198243385s" podCreationTimestamp="2026-01-22 00:27:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:27:44.196979875 +0000 UTC m=+1242.233221817" watchObservedRunningTime="2026-01-22 00:27:44.198243385 +0000 UTC m=+1242.234485297" Jan 22 00:27:51 crc kubenswrapper[4829]: I0122 00:27:51.226372 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-bridge-1-build_e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b/docker-build/0.log" Jan 22 00:27:51 crc kubenswrapper[4829]: I0122 00:27:51.227418 4829 generic.go:334] "Generic (PLEG): container finished" podID="e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b" containerID="144da8b574be7312e6ab254834bd89eef06d2491ed183ac3ef46f95145e0091d" exitCode=1 Jan 22 00:27:51 crc kubenswrapper[4829]: I0122 00:27:51.227470 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-1-build" 
event={"ID":"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b","Type":"ContainerDied","Data":"144da8b574be7312e6ab254834bd89eef06d2491ed183ac3ef46f95145e0091d"} Jan 22 00:27:51 crc kubenswrapper[4829]: I0122 00:27:51.892367 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/sg-bridge-1-build"] Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.547830 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-bridge-1-build_e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b/docker-build/0.log" Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.549044 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.659189 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-build-blob-cache\") pod \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.659230 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-node-pullsecrets\") pod \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.659250 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-build-system-configs\") pod \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.659280 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-build-ca-bundles\") pod \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.659305 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k6pmx\" (UniqueName: \"kubernetes.io/projected/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-kube-api-access-k6pmx\") pod \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.659351 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b" (UID: "e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b"). InnerVolumeSpecName "node-pullsecrets". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.659375 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-container-storage-run\") pod \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.659408 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-build-proxy-ca-bundles\") pod \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.659430 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-buildworkdir\") pod \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.659454 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-builder-dockercfg-ll9bh-push\") pod \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.659479 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-buildcachedir\") pod \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.659517 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-builder-dockercfg-ll9bh-pull\") pod \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.659602 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-container-storage-root\") pod \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\" (UID: \"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b\") " Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.659878 4829 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.660053 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b" (UID: "e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b"). InnerVolumeSpecName "buildcachedir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.660213 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b" (UID: "e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.660302 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b" (UID: "e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.660320 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b" (UID: "e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.660657 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b" (UID: "e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.661111 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b" (UID: "e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.665094 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-builder-dockercfg-ll9bh-pull" (OuterVolumeSpecName: "builder-dockercfg-ll9bh-pull") pod "e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b" (UID: "e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b"). InnerVolumeSpecName "builder-dockercfg-ll9bh-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.665109 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-builder-dockercfg-ll9bh-push" (OuterVolumeSpecName: "builder-dockercfg-ll9bh-push") pod "e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b" (UID: "e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b"). InnerVolumeSpecName "builder-dockercfg-ll9bh-push". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.665258 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-kube-api-access-k6pmx" (OuterVolumeSpecName: "kube-api-access-k6pmx") pod "e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b" (UID: "e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b"). InnerVolumeSpecName "kube-api-access-k6pmx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.718771 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b" (UID: "e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.761641 4829 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-build-blob-cache\") on node \"crc\" DevicePath \"\"" Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.761671 4829 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-build-system-configs\") on node \"crc\" DevicePath \"\"" Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.761681 4829 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.761708 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k6pmx\" (UniqueName: \"kubernetes.io/projected/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-kube-api-access-k6pmx\") on node \"crc\" DevicePath \"\"" Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.761717 4829 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-container-storage-run\") on node \"crc\" DevicePath \"\"" Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.761726 4829 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.761738 4829 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-buildworkdir\") on node \"crc\" DevicePath \"\"" Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.761746 4829 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-builder-dockercfg-ll9bh-push\") on node \"crc\" DevicePath \"\"" Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.761754 4829 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-buildcachedir\") on node \"crc\" DevicePath \"\"" Jan 22 00:27:52 crc kubenswrapper[4829]: I0122 00:27:52.761764 4829 reconciler_common.go:293] 
"Volume detached for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-builder-dockercfg-ll9bh-pull\") on node \"crc\" DevicePath \"\"" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.011675 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b" (UID: "e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.065470 4829 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b-container-storage-root\") on node \"crc\" DevicePath \"\"" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.244014 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-bridge-1-build_e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b/docker-build/0.log" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.244638 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-1-build" event={"ID":"e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b","Type":"ContainerDied","Data":"7ca3f85754a3e3adbc96fa3f5361b5fb20f45c430684194c4637a2731c6dd800"} Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.244685 4829 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7ca3f85754a3e3adbc96fa3f5361b5fb20f45c430684194c4637a2731c6dd800" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.244757 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-bridge-1-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.296026 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/sg-bridge-1-build"] Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.304371 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/sg-bridge-1-build"] Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.504464 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/sg-bridge-2-build"] Jan 22 00:27:53 crc kubenswrapper[4829]: E0122 00:27:53.505085 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b" containerName="manage-dockerfile" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.505213 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b" containerName="manage-dockerfile" Jan 22 00:27:53 crc kubenswrapper[4829]: E0122 00:27:53.505344 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b" containerName="docker-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.505450 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b" containerName="docker-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.505804 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b" containerName="docker-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.507228 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.509377 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-ll9bh" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.510860 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-bridge-2-sys-config" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.511190 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-bridge-2-global-ca" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.511430 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"sg-bridge-2-ca" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.525493 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-bridge-2-build"] Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.572676 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-buildcachedir\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.572729 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-container-storage-run\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.572892 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-build-system-configs\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.572935 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-container-storage-root\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.572959 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-node-pullsecrets\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.573039 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-builder-dockercfg-ll9bh-push\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.573068 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: 
\"kubernetes.io/empty-dir/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-buildworkdir\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.573093 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-build-ca-bundles\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.573135 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-build-proxy-ca-bundles\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.573166 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-builder-dockercfg-ll9bh-pull\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.573193 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-build-blob-cache\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.573286 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvdcn\" (UniqueName: \"kubernetes.io/projected/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-kube-api-access-kvdcn\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.674532 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-builder-dockercfg-ll9bh-push\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.674621 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-buildworkdir\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.674645 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-build-ca-bundles\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.674705 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" 
(UniqueName: \"kubernetes.io/configmap/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-build-proxy-ca-bundles\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.674738 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-builder-dockercfg-ll9bh-pull\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.674765 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-build-blob-cache\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.674798 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvdcn\" (UniqueName: \"kubernetes.io/projected/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-kube-api-access-kvdcn\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.674855 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-buildcachedir\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.674879 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-container-storage-run\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.674948 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-build-system-configs\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.674976 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-container-storage-root\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.675004 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-node-pullsecrets\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.675141 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: 
\"kubernetes.io/host-path/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-node-pullsecrets\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.675196 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-buildworkdir\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.675277 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-buildcachedir\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.675567 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-build-blob-cache\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.675785 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-build-proxy-ca-bundles\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.675974 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-build-ca-bundles\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.676177 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-container-storage-run\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.676501 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-container-storage-root\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.677193 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-build-system-configs\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.679180 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-builder-dockercfg-ll9bh-push\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " 
pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.680001 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-builder-dockercfg-ll9bh-pull\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.698015 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvdcn\" (UniqueName: \"kubernetes.io/projected/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-kube-api-access-kvdcn\") pod \"sg-bridge-2-build\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:53 crc kubenswrapper[4829]: I0122 00:27:53.824517 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-bridge-2-build" Jan 22 00:27:54 crc kubenswrapper[4829]: I0122 00:27:54.010784 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/sg-bridge-2-build"] Jan 22 00:27:54 crc kubenswrapper[4829]: I0122 00:27:54.254587 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" event={"ID":"bee7053e-10b4-4b1b-9ff2-1a96e8814a26","Type":"ContainerStarted","Data":"b449c9dc0818c3dd015e7b8b9e8e0ba43264dc429523f2155f317a9a008bf71c"} Jan 22 00:27:54 crc kubenswrapper[4829]: I0122 00:27:54.566063 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b" path="/var/lib/kubelet/pods/e0ae54ca-8f9b-4fc8-bed8-ac8d6ac0f45b/volumes" Jan 22 00:27:55 crc kubenswrapper[4829]: I0122 00:27:55.266601 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" event={"ID":"bee7053e-10b4-4b1b-9ff2-1a96e8814a26","Type":"ContainerStarted","Data":"cd1991035bff2a7eaa49dada0fde3feeec0eb0b9c1fee1c8495795ac40766755"} Jan 22 00:27:56 crc kubenswrapper[4829]: I0122 00:27:56.276199 4829 generic.go:334] "Generic (PLEG): container finished" podID="bee7053e-10b4-4b1b-9ff2-1a96e8814a26" containerID="cd1991035bff2a7eaa49dada0fde3feeec0eb0b9c1fee1c8495795ac40766755" exitCode=0 Jan 22 00:27:56 crc kubenswrapper[4829]: I0122 00:27:56.276263 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" event={"ID":"bee7053e-10b4-4b1b-9ff2-1a96e8814a26","Type":"ContainerDied","Data":"cd1991035bff2a7eaa49dada0fde3feeec0eb0b9c1fee1c8495795ac40766755"} Jan 22 00:27:57 crc kubenswrapper[4829]: I0122 00:27:57.286179 4829 generic.go:334] "Generic (PLEG): container finished" podID="bee7053e-10b4-4b1b-9ff2-1a96e8814a26" containerID="414f5dcd0ad00c58481c6e2519156012db768848292471a266980cb16be36834" exitCode=0 Jan 22 00:27:57 crc kubenswrapper[4829]: I0122 00:27:57.286251 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" event={"ID":"bee7053e-10b4-4b1b-9ff2-1a96e8814a26","Type":"ContainerDied","Data":"414f5dcd0ad00c58481c6e2519156012db768848292471a266980cb16be36834"} Jan 22 00:27:57 crc kubenswrapper[4829]: I0122 00:27:57.334274 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-bridge-2-build_bee7053e-10b4-4b1b-9ff2-1a96e8814a26/manage-dockerfile/0.log" Jan 22 00:27:58 crc kubenswrapper[4829]: I0122 00:27:58.304074 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" 
event={"ID":"bee7053e-10b4-4b1b-9ff2-1a96e8814a26","Type":"ContainerStarted","Data":"78a98b8ed76b7e4dc65cc61548428c56c57856eb3fa0e15adc1296e6dcf949db"} Jan 22 00:27:58 crc kubenswrapper[4829]: I0122 00:27:58.356874 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/sg-bridge-2-build" podStartSLOduration=5.356839548 podStartE2EDuration="5.356839548s" podCreationTimestamp="2026-01-22 00:27:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:27:58.346521595 +0000 UTC m=+1256.382763547" watchObservedRunningTime="2026-01-22 00:27:58.356839548 +0000 UTC m=+1256.393081500" Jan 22 00:28:48 crc kubenswrapper[4829]: I0122 00:28:48.661916 4829 generic.go:334] "Generic (PLEG): container finished" podID="bee7053e-10b4-4b1b-9ff2-1a96e8814a26" containerID="78a98b8ed76b7e4dc65cc61548428c56c57856eb3fa0e15adc1296e6dcf949db" exitCode=0 Jan 22 00:28:48 crc kubenswrapper[4829]: I0122 00:28:48.662029 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" event={"ID":"bee7053e-10b4-4b1b-9ff2-1a96e8814a26","Type":"ContainerDied","Data":"78a98b8ed76b7e4dc65cc61548428c56c57856eb3fa0e15adc1296e6dcf949db"} Jan 22 00:28:49 crc kubenswrapper[4829]: I0122 00:28:49.957313 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-bridge-2-build" Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.055017 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvdcn\" (UniqueName: \"kubernetes.io/projected/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-kube-api-access-kvdcn\") pod \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.055076 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-node-pullsecrets\") pod \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.055114 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-buildcachedir\") pod \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.055145 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-builder-dockercfg-ll9bh-push\") pod \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.055175 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-buildworkdir\") pod \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.055203 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-container-storage-run\") pod 
\"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.055226 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-builder-dockercfg-ll9bh-pull\") pod \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.055251 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-build-system-configs\") pod \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.055274 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-build-blob-cache\") pod \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.055304 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-container-storage-root\") pod \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.055349 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-build-ca-bundles\") pod \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.055377 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-build-proxy-ca-bundles\") pod \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\" (UID: \"bee7053e-10b4-4b1b-9ff2-1a96e8814a26\") " Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.055829 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "bee7053e-10b4-4b1b-9ff2-1a96e8814a26" (UID: "bee7053e-10b4-4b1b-9ff2-1a96e8814a26"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.055937 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "bee7053e-10b4-4b1b-9ff2-1a96e8814a26" (UID: "bee7053e-10b4-4b1b-9ff2-1a96e8814a26"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.056637 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "bee7053e-10b4-4b1b-9ff2-1a96e8814a26" (UID: "bee7053e-10b4-4b1b-9ff2-1a96e8814a26"). 
InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.056680 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "bee7053e-10b4-4b1b-9ff2-1a96e8814a26" (UID: "bee7053e-10b4-4b1b-9ff2-1a96e8814a26"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.057180 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "bee7053e-10b4-4b1b-9ff2-1a96e8814a26" (UID: "bee7053e-10b4-4b1b-9ff2-1a96e8814a26"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.057223 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "bee7053e-10b4-4b1b-9ff2-1a96e8814a26" (UID: "bee7053e-10b4-4b1b-9ff2-1a96e8814a26"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.057249 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "bee7053e-10b4-4b1b-9ff2-1a96e8814a26" (UID: "bee7053e-10b4-4b1b-9ff2-1a96e8814a26"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.062225 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-builder-dockercfg-ll9bh-push" (OuterVolumeSpecName: "builder-dockercfg-ll9bh-push") pod "bee7053e-10b4-4b1b-9ff2-1a96e8814a26" (UID: "bee7053e-10b4-4b1b-9ff2-1a96e8814a26"). InnerVolumeSpecName "builder-dockercfg-ll9bh-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.062301 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-builder-dockercfg-ll9bh-pull" (OuterVolumeSpecName: "builder-dockercfg-ll9bh-pull") pod "bee7053e-10b4-4b1b-9ff2-1a96e8814a26" (UID: "bee7053e-10b4-4b1b-9ff2-1a96e8814a26"). InnerVolumeSpecName "builder-dockercfg-ll9bh-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.062695 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-kube-api-access-kvdcn" (OuterVolumeSpecName: "kube-api-access-kvdcn") pod "bee7053e-10b4-4b1b-9ff2-1a96e8814a26" (UID: "bee7053e-10b4-4b1b-9ff2-1a96e8814a26"). InnerVolumeSpecName "kube-api-access-kvdcn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.156910 4829 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-builder-dockercfg-ll9bh-push\") on node \"crc\" DevicePath \"\"" Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.156952 4829 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-buildworkdir\") on node \"crc\" DevicePath \"\"" Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.156964 4829 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-container-storage-run\") on node \"crc\" DevicePath \"\"" Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.156977 4829 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-builder-dockercfg-ll9bh-pull\") on node \"crc\" DevicePath \"\"" Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.156991 4829 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-build-system-configs\") on node \"crc\" DevicePath \"\"" Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.157003 4829 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.157014 4829 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.157025 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvdcn\" (UniqueName: \"kubernetes.io/projected/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-kube-api-access-kvdcn\") on node \"crc\" DevicePath \"\"" Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.157036 4829 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.157049 4829 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-buildcachedir\") on node \"crc\" DevicePath \"\"" Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.161609 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "bee7053e-10b4-4b1b-9ff2-1a96e8814a26" (UID: "bee7053e-10b4-4b1b-9ff2-1a96e8814a26"). InnerVolumeSpecName "build-blob-cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.258129 4829 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-build-blob-cache\") on node \"crc\" DevicePath \"\"" Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.679448 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/sg-bridge-2-build" event={"ID":"bee7053e-10b4-4b1b-9ff2-1a96e8814a26","Type":"ContainerDied","Data":"b449c9dc0818c3dd015e7b8b9e8e0ba43264dc429523f2155f317a9a008bf71c"} Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.679499 4829 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b449c9dc0818c3dd015e7b8b9e8e0ba43264dc429523f2155f317a9a008bf71c" Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.679526 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/sg-bridge-2-build" Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.856065 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "bee7053e-10b4-4b1b-9ff2-1a96e8814a26" (UID: "bee7053e-10b4-4b1b-9ff2-1a96e8814a26"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:28:50 crc kubenswrapper[4829]: I0122 00:28:50.866063 4829 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/bee7053e-10b4-4b1b-9ff2-1a96e8814a26-container-storage-root\") on node \"crc\" DevicePath \"\"" Jan 22 00:28:54 crc kubenswrapper[4829]: I0122 00:28:54.854505 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/prometheus-webhook-snmp-1-build"] Jan 22 00:28:54 crc kubenswrapper[4829]: E0122 00:28:54.855348 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bee7053e-10b4-4b1b-9ff2-1a96e8814a26" containerName="manage-dockerfile" Jan 22 00:28:54 crc kubenswrapper[4829]: I0122 00:28:54.855372 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="bee7053e-10b4-4b1b-9ff2-1a96e8814a26" containerName="manage-dockerfile" Jan 22 00:28:54 crc kubenswrapper[4829]: E0122 00:28:54.855391 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bee7053e-10b4-4b1b-9ff2-1a96e8814a26" containerName="git-clone" Jan 22 00:28:54 crc kubenswrapper[4829]: I0122 00:28:54.855406 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="bee7053e-10b4-4b1b-9ff2-1a96e8814a26" containerName="git-clone" Jan 22 00:28:54 crc kubenswrapper[4829]: E0122 00:28:54.855423 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bee7053e-10b4-4b1b-9ff2-1a96e8814a26" containerName="docker-build" Jan 22 00:28:54 crc kubenswrapper[4829]: I0122 00:28:54.855437 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="bee7053e-10b4-4b1b-9ff2-1a96e8814a26" containerName="docker-build" Jan 22 00:28:54 crc kubenswrapper[4829]: I0122 00:28:54.855741 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="bee7053e-10b4-4b1b-9ff2-1a96e8814a26" containerName="docker-build" Jan 22 00:28:54 crc kubenswrapper[4829]: I0122 00:28:54.857087 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:54 crc kubenswrapper[4829]: I0122 00:28:54.859591 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-webhook-snmp-1-ca" Jan 22 00:28:54 crc kubenswrapper[4829]: I0122 00:28:54.859651 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-webhook-snmp-1-global-ca" Jan 22 00:28:54 crc kubenswrapper[4829]: I0122 00:28:54.861298 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-ll9bh" Jan 22 00:28:54 crc kubenswrapper[4829]: I0122 00:28:54.864527 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-webhook-snmp-1-sys-config" Jan 22 00:28:54 crc kubenswrapper[4829]: I0122 00:28:54.882166 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-1-build"] Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.026350 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/5901dd05-0e6a-4dae-8c6a-1c1376525eff-node-pullsecrets\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.026412 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bb98n\" (UniqueName: \"kubernetes.io/projected/5901dd05-0e6a-4dae-8c6a-1c1376525eff-kube-api-access-bb98n\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.026442 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/5901dd05-0e6a-4dae-8c6a-1c1376525eff-container-storage-run\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.026482 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/5901dd05-0e6a-4dae-8c6a-1c1376525eff-builder-dockercfg-ll9bh-push\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.026532 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5901dd05-0e6a-4dae-8c6a-1c1376525eff-build-ca-bundles\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.026656 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/5901dd05-0e6a-4dae-8c6a-1c1376525eff-buildworkdir\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " 
pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.026788 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5901dd05-0e6a-4dae-8c6a-1c1376525eff-build-proxy-ca-bundles\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.026843 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/5901dd05-0e6a-4dae-8c6a-1c1376525eff-builder-dockercfg-ll9bh-pull\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.026960 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/5901dd05-0e6a-4dae-8c6a-1c1376525eff-build-blob-cache\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.027030 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/5901dd05-0e6a-4dae-8c6a-1c1376525eff-container-storage-root\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.027580 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/5901dd05-0e6a-4dae-8c6a-1c1376525eff-build-system-configs\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.027649 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/5901dd05-0e6a-4dae-8c6a-1c1376525eff-buildcachedir\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.129318 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5901dd05-0e6a-4dae-8c6a-1c1376525eff-build-ca-bundles\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.129404 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/5901dd05-0e6a-4dae-8c6a-1c1376525eff-buildworkdir\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.129445 4829 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5901dd05-0e6a-4dae-8c6a-1c1376525eff-build-proxy-ca-bundles\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.129479 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/5901dd05-0e6a-4dae-8c6a-1c1376525eff-builder-dockercfg-ll9bh-pull\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.129508 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/5901dd05-0e6a-4dae-8c6a-1c1376525eff-build-blob-cache\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.129604 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/5901dd05-0e6a-4dae-8c6a-1c1376525eff-container-storage-root\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.129640 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/5901dd05-0e6a-4dae-8c6a-1c1376525eff-build-system-configs\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.129678 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/5901dd05-0e6a-4dae-8c6a-1c1376525eff-buildcachedir\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.129728 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/5901dd05-0e6a-4dae-8c6a-1c1376525eff-node-pullsecrets\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.129783 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bb98n\" (UniqueName: \"kubernetes.io/projected/5901dd05-0e6a-4dae-8c6a-1c1376525eff-kube-api-access-bb98n\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.129821 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/5901dd05-0e6a-4dae-8c6a-1c1376525eff-container-storage-run\") pod 
\"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.129978 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/5901dd05-0e6a-4dae-8c6a-1c1376525eff-node-pullsecrets\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.130064 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/5901dd05-0e6a-4dae-8c6a-1c1376525eff-buildcachedir\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.130534 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/5901dd05-0e6a-4dae-8c6a-1c1376525eff-buildworkdir\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.130355 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/5901dd05-0e6a-4dae-8c6a-1c1376525eff-builder-dockercfg-ll9bh-push\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.130756 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/5901dd05-0e6a-4dae-8c6a-1c1376525eff-container-storage-root\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.130876 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/5901dd05-0e6a-4dae-8c6a-1c1376525eff-container-storage-run\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.131367 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/5901dd05-0e6a-4dae-8c6a-1c1376525eff-build-blob-cache\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.132056 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5901dd05-0e6a-4dae-8c6a-1c1376525eff-build-proxy-ca-bundles\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.132241 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" 
(UniqueName: \"kubernetes.io/configmap/5901dd05-0e6a-4dae-8c6a-1c1376525eff-build-system-configs\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.132585 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5901dd05-0e6a-4dae-8c6a-1c1376525eff-build-ca-bundles\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.139252 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/5901dd05-0e6a-4dae-8c6a-1c1376525eff-builder-dockercfg-ll9bh-push\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.139798 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/5901dd05-0e6a-4dae-8c6a-1c1376525eff-builder-dockercfg-ll9bh-pull\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.150055 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bb98n\" (UniqueName: \"kubernetes.io/projected/5901dd05-0e6a-4dae-8c6a-1c1376525eff-kube-api-access-bb98n\") pod \"prometheus-webhook-snmp-1-build\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.183562 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.648043 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-1-build"] Jan 22 00:28:55 crc kubenswrapper[4829]: I0122 00:28:55.719826 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-1-build" event={"ID":"5901dd05-0e6a-4dae-8c6a-1c1376525eff","Type":"ContainerStarted","Data":"f1e2e53bd8e2d75153d7a2f82c56cb91f3abde7d7406641a53e774fb4f20c2d7"} Jan 22 00:28:57 crc kubenswrapper[4829]: I0122 00:28:57.742903 4829 generic.go:334] "Generic (PLEG): container finished" podID="5901dd05-0e6a-4dae-8c6a-1c1376525eff" containerID="3b196660308824b3da208903b2b54fc72f60c8087fd1f1738c82c3657499eaeb" exitCode=0 Jan 22 00:28:57 crc kubenswrapper[4829]: I0122 00:28:57.742991 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-1-build" event={"ID":"5901dd05-0e6a-4dae-8c6a-1c1376525eff","Type":"ContainerDied","Data":"3b196660308824b3da208903b2b54fc72f60c8087fd1f1738c82c3657499eaeb"} Jan 22 00:28:58 crc kubenswrapper[4829]: I0122 00:28:58.755334 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-1-build" event={"ID":"5901dd05-0e6a-4dae-8c6a-1c1376525eff","Type":"ContainerStarted","Data":"9486dc31c24fc44136fdeb7cf07fb126a26f23d00c95ac4f14b0ac1ff25a49a5"} Jan 22 00:28:58 crc kubenswrapper[4829]: I0122 00:28:58.781224 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/prometheus-webhook-snmp-1-build" podStartSLOduration=4.781197658 podStartE2EDuration="4.781197658s" podCreationTimestamp="2026-01-22 00:28:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:28:58.77835896 +0000 UTC m=+1316.814600882" watchObservedRunningTime="2026-01-22 00:28:58.781197658 +0000 UTC m=+1316.817439610" Jan 22 00:29:04 crc kubenswrapper[4829]: I0122 00:29:04.658384 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:29:04 crc kubenswrapper[4829]: I0122 00:29:04.658926 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:29:04 crc kubenswrapper[4829]: I0122 00:29:04.846135 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-1-build"] Jan 22 00:29:04 crc kubenswrapper[4829]: I0122 00:29:04.846399 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/prometheus-webhook-snmp-1-build" podUID="5901dd05-0e6a-4dae-8c6a-1c1376525eff" containerName="docker-build" containerID="cri-o://9486dc31c24fc44136fdeb7cf07fb126a26f23d00c95ac4f14b0ac1ff25a49a5" gracePeriod=30 Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.251075 4829 log.go:25] "Finished parsing log file" 
path="/var/log/pods/service-telemetry_prometheus-webhook-snmp-1-build_5901dd05-0e6a-4dae-8c6a-1c1376525eff/docker-build/0.log" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.251819 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.384581 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/5901dd05-0e6a-4dae-8c6a-1c1376525eff-buildcachedir\") pod \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.384672 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5901dd05-0e6a-4dae-8c6a-1c1376525eff-build-proxy-ca-bundles\") pod \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.384689 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/5901dd05-0e6a-4dae-8c6a-1c1376525eff-node-pullsecrets\") pod \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.384703 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5901dd05-0e6a-4dae-8c6a-1c1376525eff-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "5901dd05-0e6a-4dae-8c6a-1c1376525eff" (UID: "5901dd05-0e6a-4dae-8c6a-1c1376525eff"). InnerVolumeSpecName "buildcachedir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.384733 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/5901dd05-0e6a-4dae-8c6a-1c1376525eff-container-storage-root\") pod \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.384758 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bb98n\" (UniqueName: \"kubernetes.io/projected/5901dd05-0e6a-4dae-8c6a-1c1376525eff-kube-api-access-bb98n\") pod \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.384792 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/5901dd05-0e6a-4dae-8c6a-1c1376525eff-container-storage-run\") pod \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.384858 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/5901dd05-0e6a-4dae-8c6a-1c1376525eff-builder-dockercfg-ll9bh-push\") pod \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.384879 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/5901dd05-0e6a-4dae-8c6a-1c1376525eff-build-blob-cache\") pod \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.384900 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/5901dd05-0e6a-4dae-8c6a-1c1376525eff-builder-dockercfg-ll9bh-pull\") pod \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.384922 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5901dd05-0e6a-4dae-8c6a-1c1376525eff-build-ca-bundles\") pod \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.384937 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/5901dd05-0e6a-4dae-8c6a-1c1376525eff-build-system-configs\") pod \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.384974 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/5901dd05-0e6a-4dae-8c6a-1c1376525eff-buildworkdir\") pod \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\" (UID: \"5901dd05-0e6a-4dae-8c6a-1c1376525eff\") " Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.384761 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/host-path/5901dd05-0e6a-4dae-8c6a-1c1376525eff-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "5901dd05-0e6a-4dae-8c6a-1c1376525eff" (UID: "5901dd05-0e6a-4dae-8c6a-1c1376525eff"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.386297 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5901dd05-0e6a-4dae-8c6a-1c1376525eff-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "5901dd05-0e6a-4dae-8c6a-1c1376525eff" (UID: "5901dd05-0e6a-4dae-8c6a-1c1376525eff"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.386488 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5901dd05-0e6a-4dae-8c6a-1c1376525eff-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "5901dd05-0e6a-4dae-8c6a-1c1376525eff" (UID: "5901dd05-0e6a-4dae-8c6a-1c1376525eff"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.386602 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5901dd05-0e6a-4dae-8c6a-1c1376525eff-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "5901dd05-0e6a-4dae-8c6a-1c1376525eff" (UID: "5901dd05-0e6a-4dae-8c6a-1c1376525eff"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.386667 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5901dd05-0e6a-4dae-8c6a-1c1376525eff-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "5901dd05-0e6a-4dae-8c6a-1c1376525eff" (UID: "5901dd05-0e6a-4dae-8c6a-1c1376525eff"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.386879 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5901dd05-0e6a-4dae-8c6a-1c1376525eff-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "5901dd05-0e6a-4dae-8c6a-1c1376525eff" (UID: "5901dd05-0e6a-4dae-8c6a-1c1376525eff"). InnerVolumeSpecName "container-storage-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.387297 4829 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/5901dd05-0e6a-4dae-8c6a-1c1376525eff-container-storage-run\") on node \"crc\" DevicePath \"\"" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.387346 4829 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5901dd05-0e6a-4dae-8c6a-1c1376525eff-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.387372 4829 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/5901dd05-0e6a-4dae-8c6a-1c1376525eff-build-system-configs\") on node \"crc\" DevicePath \"\"" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.387396 4829 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/5901dd05-0e6a-4dae-8c6a-1c1376525eff-buildworkdir\") on node \"crc\" DevicePath \"\"" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.387419 4829 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/5901dd05-0e6a-4dae-8c6a-1c1376525eff-buildcachedir\") on node \"crc\" DevicePath \"\"" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.387492 4829 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/5901dd05-0e6a-4dae-8c6a-1c1376525eff-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.387517 4829 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5901dd05-0e6a-4dae-8c6a-1c1376525eff-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.391509 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5901dd05-0e6a-4dae-8c6a-1c1376525eff-kube-api-access-bb98n" (OuterVolumeSpecName: "kube-api-access-bb98n") pod "5901dd05-0e6a-4dae-8c6a-1c1376525eff" (UID: "5901dd05-0e6a-4dae-8c6a-1c1376525eff"). InnerVolumeSpecName "kube-api-access-bb98n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.392816 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5901dd05-0e6a-4dae-8c6a-1c1376525eff-builder-dockercfg-ll9bh-pull" (OuterVolumeSpecName: "builder-dockercfg-ll9bh-pull") pod "5901dd05-0e6a-4dae-8c6a-1c1376525eff" (UID: "5901dd05-0e6a-4dae-8c6a-1c1376525eff"). InnerVolumeSpecName "builder-dockercfg-ll9bh-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.393018 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5901dd05-0e6a-4dae-8c6a-1c1376525eff-builder-dockercfg-ll9bh-push" (OuterVolumeSpecName: "builder-dockercfg-ll9bh-push") pod "5901dd05-0e6a-4dae-8c6a-1c1376525eff" (UID: "5901dd05-0e6a-4dae-8c6a-1c1376525eff"). InnerVolumeSpecName "builder-dockercfg-ll9bh-push". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.433117 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5901dd05-0e6a-4dae-8c6a-1c1376525eff-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "5901dd05-0e6a-4dae-8c6a-1c1376525eff" (UID: "5901dd05-0e6a-4dae-8c6a-1c1376525eff"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.487930 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bb98n\" (UniqueName: \"kubernetes.io/projected/5901dd05-0e6a-4dae-8c6a-1c1376525eff-kube-api-access-bb98n\") on node \"crc\" DevicePath \"\"" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.487970 4829 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/5901dd05-0e6a-4dae-8c6a-1c1376525eff-builder-dockercfg-ll9bh-push\") on node \"crc\" DevicePath \"\"" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.487981 4829 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/5901dd05-0e6a-4dae-8c6a-1c1376525eff-build-blob-cache\") on node \"crc\" DevicePath \"\"" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.487990 4829 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/5901dd05-0e6a-4dae-8c6a-1c1376525eff-builder-dockercfg-ll9bh-pull\") on node \"crc\" DevicePath \"\"" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.710960 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5901dd05-0e6a-4dae-8c6a-1c1376525eff-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "5901dd05-0e6a-4dae-8c6a-1c1376525eff" (UID: "5901dd05-0e6a-4dae-8c6a-1c1376525eff"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.791776 4829 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/5901dd05-0e6a-4dae-8c6a-1c1376525eff-container-storage-root\") on node \"crc\" DevicePath \"\"" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.804916 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-webhook-snmp-1-build_5901dd05-0e6a-4dae-8c6a-1c1376525eff/docker-build/0.log" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.805798 4829 generic.go:334] "Generic (PLEG): container finished" podID="5901dd05-0e6a-4dae-8c6a-1c1376525eff" containerID="9486dc31c24fc44136fdeb7cf07fb126a26f23d00c95ac4f14b0ac1ff25a49a5" exitCode=1 Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.805853 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-1-build" event={"ID":"5901dd05-0e6a-4dae-8c6a-1c1376525eff","Type":"ContainerDied","Data":"9486dc31c24fc44136fdeb7cf07fb126a26f23d00c95ac4f14b0ac1ff25a49a5"} Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.805870 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-1-build" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.805890 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-1-build" event={"ID":"5901dd05-0e6a-4dae-8c6a-1c1376525eff","Type":"ContainerDied","Data":"f1e2e53bd8e2d75153d7a2f82c56cb91f3abde7d7406641a53e774fb4f20c2d7"} Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.805915 4829 scope.go:117] "RemoveContainer" containerID="9486dc31c24fc44136fdeb7cf07fb126a26f23d00c95ac4f14b0ac1ff25a49a5" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.838312 4829 scope.go:117] "RemoveContainer" containerID="3b196660308824b3da208903b2b54fc72f60c8087fd1f1738c82c3657499eaeb" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.845114 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-1-build"] Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.851705 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-1-build"] Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.866445 4829 scope.go:117] "RemoveContainer" containerID="9486dc31c24fc44136fdeb7cf07fb126a26f23d00c95ac4f14b0ac1ff25a49a5" Jan 22 00:29:05 crc kubenswrapper[4829]: E0122 00:29:05.867590 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9486dc31c24fc44136fdeb7cf07fb126a26f23d00c95ac4f14b0ac1ff25a49a5\": container with ID starting with 9486dc31c24fc44136fdeb7cf07fb126a26f23d00c95ac4f14b0ac1ff25a49a5 not found: ID does not exist" containerID="9486dc31c24fc44136fdeb7cf07fb126a26f23d00c95ac4f14b0ac1ff25a49a5" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.867653 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9486dc31c24fc44136fdeb7cf07fb126a26f23d00c95ac4f14b0ac1ff25a49a5"} err="failed to get container status \"9486dc31c24fc44136fdeb7cf07fb126a26f23d00c95ac4f14b0ac1ff25a49a5\": rpc error: code = NotFound desc = could not find container \"9486dc31c24fc44136fdeb7cf07fb126a26f23d00c95ac4f14b0ac1ff25a49a5\": container with ID starting with 9486dc31c24fc44136fdeb7cf07fb126a26f23d00c95ac4f14b0ac1ff25a49a5 not found: ID does not exist" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.867692 4829 scope.go:117] "RemoveContainer" containerID="3b196660308824b3da208903b2b54fc72f60c8087fd1f1738c82c3657499eaeb" Jan 22 00:29:05 crc kubenswrapper[4829]: E0122 00:29:05.868169 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b196660308824b3da208903b2b54fc72f60c8087fd1f1738c82c3657499eaeb\": container with ID starting with 3b196660308824b3da208903b2b54fc72f60c8087fd1f1738c82c3657499eaeb not found: ID does not exist" containerID="3b196660308824b3da208903b2b54fc72f60c8087fd1f1738c82c3657499eaeb" Jan 22 00:29:05 crc kubenswrapper[4829]: I0122 00:29:05.868346 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b196660308824b3da208903b2b54fc72f60c8087fd1f1738c82c3657499eaeb"} err="failed to get container status \"3b196660308824b3da208903b2b54fc72f60c8087fd1f1738c82c3657499eaeb\": rpc error: code = NotFound desc = could not find container \"3b196660308824b3da208903b2b54fc72f60c8087fd1f1738c82c3657499eaeb\": container with ID starting with 
3b196660308824b3da208903b2b54fc72f60c8087fd1f1738c82c3657499eaeb not found: ID does not exist" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.456514 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/prometheus-webhook-snmp-2-build"] Jan 22 00:29:06 crc kubenswrapper[4829]: E0122 00:29:06.456909 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5901dd05-0e6a-4dae-8c6a-1c1376525eff" containerName="docker-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.456933 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="5901dd05-0e6a-4dae-8c6a-1c1376525eff" containerName="docker-build" Jan 22 00:29:06 crc kubenswrapper[4829]: E0122 00:29:06.456951 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5901dd05-0e6a-4dae-8c6a-1c1376525eff" containerName="manage-dockerfile" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.456963 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="5901dd05-0e6a-4dae-8c6a-1c1376525eff" containerName="manage-dockerfile" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.457149 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="5901dd05-0e6a-4dae-8c6a-1c1376525eff" containerName="docker-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.458196 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.461864 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-webhook-snmp-2-sys-config" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.462570 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-ll9bh" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.462857 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-webhook-snmp-2-global-ca" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.464691 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-webhook-snmp-2-ca" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.486263 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-2-build"] Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.533580 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/391f97ab-a0ff-4185-bf21-e9563069d026-build-system-configs\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.533697 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/391f97ab-a0ff-4185-bf21-e9563069d026-build-ca-bundles\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.533817 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/391f97ab-a0ff-4185-bf21-e9563069d026-builder-dockercfg-ll9bh-pull\") pod 
\"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.533893 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/391f97ab-a0ff-4185-bf21-e9563069d026-build-proxy-ca-bundles\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.534005 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/391f97ab-a0ff-4185-bf21-e9563069d026-builder-dockercfg-ll9bh-push\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.534080 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/391f97ab-a0ff-4185-bf21-e9563069d026-container-storage-root\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.534165 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/391f97ab-a0ff-4185-bf21-e9563069d026-build-blob-cache\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.534226 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/391f97ab-a0ff-4185-bf21-e9563069d026-buildcachedir\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.534345 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfgxd\" (UniqueName: \"kubernetes.io/projected/391f97ab-a0ff-4185-bf21-e9563069d026-kube-api-access-kfgxd\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.534462 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/391f97ab-a0ff-4185-bf21-e9563069d026-node-pullsecrets\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.534535 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/391f97ab-a0ff-4185-bf21-e9563069d026-container-storage-run\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " 
pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.534642 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/391f97ab-a0ff-4185-bf21-e9563069d026-buildworkdir\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.570439 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5901dd05-0e6a-4dae-8c6a-1c1376525eff" path="/var/lib/kubelet/pods/5901dd05-0e6a-4dae-8c6a-1c1376525eff/volumes" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.636244 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/391f97ab-a0ff-4185-bf21-e9563069d026-builder-dockercfg-ll9bh-pull\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.636325 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/391f97ab-a0ff-4185-bf21-e9563069d026-build-proxy-ca-bundles\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.636433 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/391f97ab-a0ff-4185-bf21-e9563069d026-builder-dockercfg-ll9bh-push\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.636499 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/391f97ab-a0ff-4185-bf21-e9563069d026-container-storage-root\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.636607 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/391f97ab-a0ff-4185-bf21-e9563069d026-build-blob-cache\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.636677 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/391f97ab-a0ff-4185-bf21-e9563069d026-buildcachedir\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.636746 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfgxd\" (UniqueName: \"kubernetes.io/projected/391f97ab-a0ff-4185-bf21-e9563069d026-kube-api-access-kfgxd\") pod \"prometheus-webhook-snmp-2-build\" (UID: 
\"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.636812 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/391f97ab-a0ff-4185-bf21-e9563069d026-node-pullsecrets\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.636867 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/391f97ab-a0ff-4185-bf21-e9563069d026-container-storage-run\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.636937 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/391f97ab-a0ff-4185-bf21-e9563069d026-buildworkdir\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.637388 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/391f97ab-a0ff-4185-bf21-e9563069d026-buildcachedir\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.637424 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/391f97ab-a0ff-4185-bf21-e9563069d026-build-system-configs\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.637489 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/391f97ab-a0ff-4185-bf21-e9563069d026-build-ca-bundles\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.637576 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/391f97ab-a0ff-4185-bf21-e9563069d026-build-proxy-ca-bundles\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.637763 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/391f97ab-a0ff-4185-bf21-e9563069d026-node-pullsecrets\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.638243 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: 
\"kubernetes.io/empty-dir/391f97ab-a0ff-4185-bf21-e9563069d026-container-storage-root\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.638484 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/391f97ab-a0ff-4185-bf21-e9563069d026-buildworkdir\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.638632 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/391f97ab-a0ff-4185-bf21-e9563069d026-container-storage-run\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.638786 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/391f97ab-a0ff-4185-bf21-e9563069d026-build-blob-cache\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.638881 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/391f97ab-a0ff-4185-bf21-e9563069d026-build-system-configs\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.639769 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/391f97ab-a0ff-4185-bf21-e9563069d026-build-ca-bundles\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.644458 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/391f97ab-a0ff-4185-bf21-e9563069d026-builder-dockercfg-ll9bh-push\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.645985 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/391f97ab-a0ff-4185-bf21-e9563069d026-builder-dockercfg-ll9bh-pull\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.666370 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfgxd\" (UniqueName: \"kubernetes.io/projected/391f97ab-a0ff-4185-bf21-e9563069d026-kube-api-access-kfgxd\") pod \"prometheus-webhook-snmp-2-build\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:06 crc kubenswrapper[4829]: I0122 00:29:06.837659 
4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:29:07 crc kubenswrapper[4829]: I0122 00:29:07.082208 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-webhook-snmp-2-build"] Jan 22 00:29:07 crc kubenswrapper[4829]: I0122 00:29:07.827996 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"391f97ab-a0ff-4185-bf21-e9563069d026","Type":"ContainerStarted","Data":"19f9e1d4858879db9f36af17b32399637926a33e8f68188cd54a2907eadf32a4"} Jan 22 00:29:07 crc kubenswrapper[4829]: I0122 00:29:07.828062 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"391f97ab-a0ff-4185-bf21-e9563069d026","Type":"ContainerStarted","Data":"68548c56a9df0a0d9e659d230355724e126c58c8bb43852a208a28a1e18922bb"} Jan 22 00:29:08 crc kubenswrapper[4829]: I0122 00:29:08.843997 4829 generic.go:334] "Generic (PLEG): container finished" podID="391f97ab-a0ff-4185-bf21-e9563069d026" containerID="19f9e1d4858879db9f36af17b32399637926a33e8f68188cd54a2907eadf32a4" exitCode=0 Jan 22 00:29:08 crc kubenswrapper[4829]: I0122 00:29:08.844068 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"391f97ab-a0ff-4185-bf21-e9563069d026","Type":"ContainerDied","Data":"19f9e1d4858879db9f36af17b32399637926a33e8f68188cd54a2907eadf32a4"} Jan 22 00:29:09 crc kubenswrapper[4829]: I0122 00:29:09.854866 4829 generic.go:334] "Generic (PLEG): container finished" podID="391f97ab-a0ff-4185-bf21-e9563069d026" containerID="67133616c45d35ae97829fea5fd6ba259c1f5760d55e1becb215c6370a7aad0f" exitCode=0 Jan 22 00:29:09 crc kubenswrapper[4829]: I0122 00:29:09.855782 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"391f97ab-a0ff-4185-bf21-e9563069d026","Type":"ContainerDied","Data":"67133616c45d35ae97829fea5fd6ba259c1f5760d55e1becb215c6370a7aad0f"} Jan 22 00:29:09 crc kubenswrapper[4829]: I0122 00:29:09.904267 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-webhook-snmp-2-build_391f97ab-a0ff-4185-bf21-e9563069d026/manage-dockerfile/0.log" Jan 22 00:29:10 crc kubenswrapper[4829]: I0122 00:29:10.863887 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"391f97ab-a0ff-4185-bf21-e9563069d026","Type":"ContainerStarted","Data":"065a702cd4169349980125ee645ea234ab595861863017bb1ab99d82a059032e"} Jan 22 00:29:10 crc kubenswrapper[4829]: I0122 00:29:10.895040 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/prometheus-webhook-snmp-2-build" podStartSLOduration=4.895022242 podStartE2EDuration="4.895022242s" podCreationTimestamp="2026-01-22 00:29:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:29:10.890555543 +0000 UTC m=+1328.926797475" watchObservedRunningTime="2026-01-22 00:29:10.895022242 +0000 UTC m=+1328.931264164" Jan 22 00:29:34 crc kubenswrapper[4829]: I0122 00:29:34.658678 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial 
tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:29:34 crc kubenswrapper[4829]: I0122 00:29:34.661240 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:30:00 crc kubenswrapper[4829]: I0122 00:30:00.141533 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484030-t75ln"] Jan 22 00:30:00 crc kubenswrapper[4829]: I0122 00:30:00.143038 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484030-t75ln" Jan 22 00:30:00 crc kubenswrapper[4829]: I0122 00:30:00.145437 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 00:30:00 crc kubenswrapper[4829]: I0122 00:30:00.147285 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 00:30:00 crc kubenswrapper[4829]: I0122 00:30:00.188579 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484030-t75ln"] Jan 22 00:30:00 crc kubenswrapper[4829]: I0122 00:30:00.341466 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bv46k\" (UniqueName: \"kubernetes.io/projected/7410ae33-4e2e-4147-9890-36c889741f39-kube-api-access-bv46k\") pod \"collect-profiles-29484030-t75ln\" (UID: \"7410ae33-4e2e-4147-9890-36c889741f39\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484030-t75ln" Jan 22 00:30:00 crc kubenswrapper[4829]: I0122 00:30:00.341535 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7410ae33-4e2e-4147-9890-36c889741f39-secret-volume\") pod \"collect-profiles-29484030-t75ln\" (UID: \"7410ae33-4e2e-4147-9890-36c889741f39\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484030-t75ln" Jan 22 00:30:00 crc kubenswrapper[4829]: I0122 00:30:00.341616 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7410ae33-4e2e-4147-9890-36c889741f39-config-volume\") pod \"collect-profiles-29484030-t75ln\" (UID: \"7410ae33-4e2e-4147-9890-36c889741f39\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484030-t75ln" Jan 22 00:30:00 crc kubenswrapper[4829]: I0122 00:30:00.443160 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bv46k\" (UniqueName: \"kubernetes.io/projected/7410ae33-4e2e-4147-9890-36c889741f39-kube-api-access-bv46k\") pod \"collect-profiles-29484030-t75ln\" (UID: \"7410ae33-4e2e-4147-9890-36c889741f39\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484030-t75ln" Jan 22 00:30:00 crc kubenswrapper[4829]: I0122 00:30:00.443261 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7410ae33-4e2e-4147-9890-36c889741f39-secret-volume\") pod \"collect-profiles-29484030-t75ln\" (UID: 
\"7410ae33-4e2e-4147-9890-36c889741f39\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484030-t75ln" Jan 22 00:30:00 crc kubenswrapper[4829]: I0122 00:30:00.443329 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7410ae33-4e2e-4147-9890-36c889741f39-config-volume\") pod \"collect-profiles-29484030-t75ln\" (UID: \"7410ae33-4e2e-4147-9890-36c889741f39\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484030-t75ln" Jan 22 00:30:00 crc kubenswrapper[4829]: I0122 00:30:00.444313 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7410ae33-4e2e-4147-9890-36c889741f39-config-volume\") pod \"collect-profiles-29484030-t75ln\" (UID: \"7410ae33-4e2e-4147-9890-36c889741f39\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484030-t75ln" Jan 22 00:30:00 crc kubenswrapper[4829]: I0122 00:30:00.453554 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7410ae33-4e2e-4147-9890-36c889741f39-secret-volume\") pod \"collect-profiles-29484030-t75ln\" (UID: \"7410ae33-4e2e-4147-9890-36c889741f39\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484030-t75ln" Jan 22 00:30:00 crc kubenswrapper[4829]: I0122 00:30:00.461731 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bv46k\" (UniqueName: \"kubernetes.io/projected/7410ae33-4e2e-4147-9890-36c889741f39-kube-api-access-bv46k\") pod \"collect-profiles-29484030-t75ln\" (UID: \"7410ae33-4e2e-4147-9890-36c889741f39\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484030-t75ln" Jan 22 00:30:00 crc kubenswrapper[4829]: I0122 00:30:00.760078 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484030-t75ln" Jan 22 00:30:01 crc kubenswrapper[4829]: I0122 00:30:01.005321 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484030-t75ln"] Jan 22 00:30:01 crc kubenswrapper[4829]: I0122 00:30:01.216231 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484030-t75ln" event={"ID":"7410ae33-4e2e-4147-9890-36c889741f39","Type":"ContainerStarted","Data":"c64e5fd408d1f7581b988b4f2ad7cb4ae772354f7c5b374368c1b647f7213501"} Jan 22 00:30:01 crc kubenswrapper[4829]: I0122 00:30:01.216276 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484030-t75ln" event={"ID":"7410ae33-4e2e-4147-9890-36c889741f39","Type":"ContainerStarted","Data":"adde210d0718842ec1d3e6049418f12976fd6f754d22a3f4baca67c7a712d433"} Jan 22 00:30:01 crc kubenswrapper[4829]: I0122 00:30:01.235604 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29484030-t75ln" podStartSLOduration=1.235582753 podStartE2EDuration="1.235582753s" podCreationTimestamp="2026-01-22 00:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:30:01.230417183 +0000 UTC m=+1379.266659095" watchObservedRunningTime="2026-01-22 00:30:01.235582753 +0000 UTC m=+1379.271824665" Jan 22 00:30:02 crc kubenswrapper[4829]: I0122 00:30:02.228486 4829 generic.go:334] "Generic (PLEG): container finished" podID="7410ae33-4e2e-4147-9890-36c889741f39" containerID="c64e5fd408d1f7581b988b4f2ad7cb4ae772354f7c5b374368c1b647f7213501" exitCode=0 Jan 22 00:30:02 crc kubenswrapper[4829]: I0122 00:30:02.228621 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484030-t75ln" event={"ID":"7410ae33-4e2e-4147-9890-36c889741f39","Type":"ContainerDied","Data":"c64e5fd408d1f7581b988b4f2ad7cb4ae772354f7c5b374368c1b647f7213501"} Jan 22 00:30:03 crc kubenswrapper[4829]: I0122 00:30:03.482204 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484030-t75ln" Jan 22 00:30:03 crc kubenswrapper[4829]: I0122 00:30:03.586152 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bv46k\" (UniqueName: \"kubernetes.io/projected/7410ae33-4e2e-4147-9890-36c889741f39-kube-api-access-bv46k\") pod \"7410ae33-4e2e-4147-9890-36c889741f39\" (UID: \"7410ae33-4e2e-4147-9890-36c889741f39\") " Jan 22 00:30:03 crc kubenswrapper[4829]: I0122 00:30:03.586213 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7410ae33-4e2e-4147-9890-36c889741f39-secret-volume\") pod \"7410ae33-4e2e-4147-9890-36c889741f39\" (UID: \"7410ae33-4e2e-4147-9890-36c889741f39\") " Jan 22 00:30:03 crc kubenswrapper[4829]: I0122 00:30:03.586290 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7410ae33-4e2e-4147-9890-36c889741f39-config-volume\") pod \"7410ae33-4e2e-4147-9890-36c889741f39\" (UID: \"7410ae33-4e2e-4147-9890-36c889741f39\") " Jan 22 00:30:03 crc kubenswrapper[4829]: I0122 00:30:03.587251 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7410ae33-4e2e-4147-9890-36c889741f39-config-volume" (OuterVolumeSpecName: "config-volume") pod "7410ae33-4e2e-4147-9890-36c889741f39" (UID: "7410ae33-4e2e-4147-9890-36c889741f39"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:30:03 crc kubenswrapper[4829]: I0122 00:30:03.591783 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7410ae33-4e2e-4147-9890-36c889741f39-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "7410ae33-4e2e-4147-9890-36c889741f39" (UID: "7410ae33-4e2e-4147-9890-36c889741f39"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:30:03 crc kubenswrapper[4829]: I0122 00:30:03.592422 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7410ae33-4e2e-4147-9890-36c889741f39-kube-api-access-bv46k" (OuterVolumeSpecName: "kube-api-access-bv46k") pod "7410ae33-4e2e-4147-9890-36c889741f39" (UID: "7410ae33-4e2e-4147-9890-36c889741f39"). InnerVolumeSpecName "kube-api-access-bv46k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:30:03 crc kubenswrapper[4829]: I0122 00:30:03.687692 4829 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7410ae33-4e2e-4147-9890-36c889741f39-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 00:30:03 crc kubenswrapper[4829]: I0122 00:30:03.688224 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bv46k\" (UniqueName: \"kubernetes.io/projected/7410ae33-4e2e-4147-9890-36c889741f39-kube-api-access-bv46k\") on node \"crc\" DevicePath \"\"" Jan 22 00:30:03 crc kubenswrapper[4829]: I0122 00:30:03.688273 4829 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7410ae33-4e2e-4147-9890-36c889741f39-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 00:30:04 crc kubenswrapper[4829]: I0122 00:30:04.246500 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484030-t75ln" event={"ID":"7410ae33-4e2e-4147-9890-36c889741f39","Type":"ContainerDied","Data":"adde210d0718842ec1d3e6049418f12976fd6f754d22a3f4baca67c7a712d433"} Jan 22 00:30:04 crc kubenswrapper[4829]: I0122 00:30:04.246588 4829 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="adde210d0718842ec1d3e6049418f12976fd6f754d22a3f4baca67c7a712d433" Jan 22 00:30:04 crc kubenswrapper[4829]: I0122 00:30:04.246594 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484030-t75ln" Jan 22 00:30:04 crc kubenswrapper[4829]: I0122 00:30:04.658433 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:30:04 crc kubenswrapper[4829]: I0122 00:30:04.658588 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:30:04 crc kubenswrapper[4829]: I0122 00:30:04.658662 4829 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 00:30:04 crc kubenswrapper[4829]: I0122 00:30:04.659519 4829 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"dcae494e6dd96d020976f8bde0ed9bba1101db5e0723ced80c6af5f0103c9228"} pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 00:30:04 crc kubenswrapper[4829]: I0122 00:30:04.659657 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" containerID="cri-o://dcae494e6dd96d020976f8bde0ed9bba1101db5e0723ced80c6af5f0103c9228" gracePeriod=600 Jan 22 00:30:05 crc kubenswrapper[4829]: I0122 00:30:05.254096 4829 generic.go:334] "Generic (PLEG): container finished" 
podID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerID="dcae494e6dd96d020976f8bde0ed9bba1101db5e0723ced80c6af5f0103c9228" exitCode=0 Jan 22 00:30:05 crc kubenswrapper[4829]: I0122 00:30:05.254199 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerDied","Data":"dcae494e6dd96d020976f8bde0ed9bba1101db5e0723ced80c6af5f0103c9228"} Jan 22 00:30:05 crc kubenswrapper[4829]: I0122 00:30:05.254406 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerStarted","Data":"eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061"} Jan 22 00:30:05 crc kubenswrapper[4829]: I0122 00:30:05.254435 4829 scope.go:117] "RemoveContainer" containerID="0483a801a05aaf6e880f016f24ef4b653fbb05a78257dcb329328e91fdd090fc" Jan 22 00:30:16 crc kubenswrapper[4829]: I0122 00:30:16.348098 4829 generic.go:334] "Generic (PLEG): container finished" podID="391f97ab-a0ff-4185-bf21-e9563069d026" containerID="065a702cd4169349980125ee645ea234ab595861863017bb1ab99d82a059032e" exitCode=0 Jan 22 00:30:16 crc kubenswrapper[4829]: I0122 00:30:16.348183 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"391f97ab-a0ff-4185-bf21-e9563069d026","Type":"ContainerDied","Data":"065a702cd4169349980125ee645ea234ab595861863017bb1ab99d82a059032e"} Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.609140 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.791368 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/391f97ab-a0ff-4185-bf21-e9563069d026-builder-dockercfg-ll9bh-push\") pod \"391f97ab-a0ff-4185-bf21-e9563069d026\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.791442 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/391f97ab-a0ff-4185-bf21-e9563069d026-builder-dockercfg-ll9bh-pull\") pod \"391f97ab-a0ff-4185-bf21-e9563069d026\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.791518 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/391f97ab-a0ff-4185-bf21-e9563069d026-buildworkdir\") pod \"391f97ab-a0ff-4185-bf21-e9563069d026\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.791575 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/391f97ab-a0ff-4185-bf21-e9563069d026-container-storage-run\") pod \"391f97ab-a0ff-4185-bf21-e9563069d026\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.791640 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/391f97ab-a0ff-4185-bf21-e9563069d026-build-proxy-ca-bundles\") pod 
\"391f97ab-a0ff-4185-bf21-e9563069d026\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.791675 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfgxd\" (UniqueName: \"kubernetes.io/projected/391f97ab-a0ff-4185-bf21-e9563069d026-kube-api-access-kfgxd\") pod \"391f97ab-a0ff-4185-bf21-e9563069d026\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.791710 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/391f97ab-a0ff-4185-bf21-e9563069d026-buildcachedir\") pod \"391f97ab-a0ff-4185-bf21-e9563069d026\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.791732 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/391f97ab-a0ff-4185-bf21-e9563069d026-build-blob-cache\") pod \"391f97ab-a0ff-4185-bf21-e9563069d026\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.791787 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/391f97ab-a0ff-4185-bf21-e9563069d026-build-system-configs\") pod \"391f97ab-a0ff-4185-bf21-e9563069d026\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.791810 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/391f97ab-a0ff-4185-bf21-e9563069d026-node-pullsecrets\") pod \"391f97ab-a0ff-4185-bf21-e9563069d026\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.791840 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/391f97ab-a0ff-4185-bf21-e9563069d026-container-storage-root\") pod \"391f97ab-a0ff-4185-bf21-e9563069d026\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.791876 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/391f97ab-a0ff-4185-bf21-e9563069d026-build-ca-bundles\") pod \"391f97ab-a0ff-4185-bf21-e9563069d026\" (UID: \"391f97ab-a0ff-4185-bf21-e9563069d026\") " Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.791982 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/391f97ab-a0ff-4185-bf21-e9563069d026-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "391f97ab-a0ff-4185-bf21-e9563069d026" (UID: "391f97ab-a0ff-4185-bf21-e9563069d026"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.792086 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/391f97ab-a0ff-4185-bf21-e9563069d026-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "391f97ab-a0ff-4185-bf21-e9563069d026" (UID: "391f97ab-a0ff-4185-bf21-e9563069d026"). InnerVolumeSpecName "node-pullsecrets". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.792400 4829 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/391f97ab-a0ff-4185-bf21-e9563069d026-buildcachedir\") on node \"crc\" DevicePath \"\"" Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.792413 4829 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/391f97ab-a0ff-4185-bf21-e9563069d026-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.792946 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/391f97ab-a0ff-4185-bf21-e9563069d026-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "391f97ab-a0ff-4185-bf21-e9563069d026" (UID: "391f97ab-a0ff-4185-bf21-e9563069d026"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.793121 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/391f97ab-a0ff-4185-bf21-e9563069d026-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "391f97ab-a0ff-4185-bf21-e9563069d026" (UID: "391f97ab-a0ff-4185-bf21-e9563069d026"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.793423 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/391f97ab-a0ff-4185-bf21-e9563069d026-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "391f97ab-a0ff-4185-bf21-e9563069d026" (UID: "391f97ab-a0ff-4185-bf21-e9563069d026"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.793667 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/391f97ab-a0ff-4185-bf21-e9563069d026-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "391f97ab-a0ff-4185-bf21-e9563069d026" (UID: "391f97ab-a0ff-4185-bf21-e9563069d026"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.797624 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/391f97ab-a0ff-4185-bf21-e9563069d026-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "391f97ab-a0ff-4185-bf21-e9563069d026" (UID: "391f97ab-a0ff-4185-bf21-e9563069d026"). InnerVolumeSpecName "buildworkdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.802787 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/391f97ab-a0ff-4185-bf21-e9563069d026-builder-dockercfg-ll9bh-push" (OuterVolumeSpecName: "builder-dockercfg-ll9bh-push") pod "391f97ab-a0ff-4185-bf21-e9563069d026" (UID: "391f97ab-a0ff-4185-bf21-e9563069d026"). InnerVolumeSpecName "builder-dockercfg-ll9bh-push". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.802853 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/391f97ab-a0ff-4185-bf21-e9563069d026-builder-dockercfg-ll9bh-pull" (OuterVolumeSpecName: "builder-dockercfg-ll9bh-pull") pod "391f97ab-a0ff-4185-bf21-e9563069d026" (UID: "391f97ab-a0ff-4185-bf21-e9563069d026"). InnerVolumeSpecName "builder-dockercfg-ll9bh-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.802890 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/391f97ab-a0ff-4185-bf21-e9563069d026-kube-api-access-kfgxd" (OuterVolumeSpecName: "kube-api-access-kfgxd") pod "391f97ab-a0ff-4185-bf21-e9563069d026" (UID: "391f97ab-a0ff-4185-bf21-e9563069d026"). InnerVolumeSpecName "kube-api-access-kfgxd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.893793 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfgxd\" (UniqueName: \"kubernetes.io/projected/391f97ab-a0ff-4185-bf21-e9563069d026-kube-api-access-kfgxd\") on node \"crc\" DevicePath \"\"" Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.893851 4829 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/391f97ab-a0ff-4185-bf21-e9563069d026-build-system-configs\") on node \"crc\" DevicePath \"\"" Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.893877 4829 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/391f97ab-a0ff-4185-bf21-e9563069d026-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.893902 4829 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-ll9bh-push\" (UniqueName: \"kubernetes.io/secret/391f97ab-a0ff-4185-bf21-e9563069d026-builder-dockercfg-ll9bh-push\") on node \"crc\" DevicePath \"\"" Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.893920 4829 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-ll9bh-pull\" (UniqueName: \"kubernetes.io/secret/391f97ab-a0ff-4185-bf21-e9563069d026-builder-dockercfg-ll9bh-pull\") on node \"crc\" DevicePath \"\"" Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.893940 4829 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/391f97ab-a0ff-4185-bf21-e9563069d026-buildworkdir\") on node \"crc\" DevicePath \"\"" Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.893963 4829 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/391f97ab-a0ff-4185-bf21-e9563069d026-container-storage-run\") on node \"crc\" DevicePath \"\"" Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.893987 4829 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/391f97ab-a0ff-4185-bf21-e9563069d026-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.904325 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/391f97ab-a0ff-4185-bf21-e9563069d026-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "391f97ab-a0ff-4185-bf21-e9563069d026" (UID: 
"391f97ab-a0ff-4185-bf21-e9563069d026"). InnerVolumeSpecName "build-blob-cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:30:17 crc kubenswrapper[4829]: I0122 00:30:17.995782 4829 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/391f97ab-a0ff-4185-bf21-e9563069d026-build-blob-cache\") on node \"crc\" DevicePath \"\"" Jan 22 00:30:18 crc kubenswrapper[4829]: I0122 00:30:18.369577 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-webhook-snmp-2-build" event={"ID":"391f97ab-a0ff-4185-bf21-e9563069d026","Type":"ContainerDied","Data":"68548c56a9df0a0d9e659d230355724e126c58c8bb43852a208a28a1e18922bb"} Jan 22 00:30:18 crc kubenswrapper[4829]: I0122 00:30:18.369645 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/prometheus-webhook-snmp-2-build" Jan 22 00:30:18 crc kubenswrapper[4829]: I0122 00:30:18.370054 4829 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="68548c56a9df0a0d9e659d230355724e126c58c8bb43852a208a28a1e18922bb" Jan 22 00:30:18 crc kubenswrapper[4829]: I0122 00:30:18.779351 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/391f97ab-a0ff-4185-bf21-e9563069d026-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "391f97ab-a0ff-4185-bf21-e9563069d026" (UID: "391f97ab-a0ff-4185-bf21-e9563069d026"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:30:18 crc kubenswrapper[4829]: I0122 00:30:18.826365 4829 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/391f97ab-a0ff-4185-bf21-e9563069d026-container-storage-root\") on node \"crc\" DevicePath \"\"" Jan 22 00:30:24 crc kubenswrapper[4829]: I0122 00:30:24.273290 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/smart-gateway-operator-74c8dcd6b5-k6w8z"] Jan 22 00:30:24 crc kubenswrapper[4829]: E0122 00:30:24.274035 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7410ae33-4e2e-4147-9890-36c889741f39" containerName="collect-profiles" Jan 22 00:30:24 crc kubenswrapper[4829]: I0122 00:30:24.274048 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="7410ae33-4e2e-4147-9890-36c889741f39" containerName="collect-profiles" Jan 22 00:30:24 crc kubenswrapper[4829]: E0122 00:30:24.274064 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="391f97ab-a0ff-4185-bf21-e9563069d026" containerName="docker-build" Jan 22 00:30:24 crc kubenswrapper[4829]: I0122 00:30:24.274070 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="391f97ab-a0ff-4185-bf21-e9563069d026" containerName="docker-build" Jan 22 00:30:24 crc kubenswrapper[4829]: E0122 00:30:24.274081 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="391f97ab-a0ff-4185-bf21-e9563069d026" containerName="git-clone" Jan 22 00:30:24 crc kubenswrapper[4829]: I0122 00:30:24.274087 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="391f97ab-a0ff-4185-bf21-e9563069d026" containerName="git-clone" Jan 22 00:30:24 crc kubenswrapper[4829]: E0122 00:30:24.274094 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="391f97ab-a0ff-4185-bf21-e9563069d026" containerName="manage-dockerfile" Jan 22 00:30:24 crc kubenswrapper[4829]: I0122 00:30:24.274101 4829 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="391f97ab-a0ff-4185-bf21-e9563069d026" containerName="manage-dockerfile" Jan 22 00:30:24 crc kubenswrapper[4829]: I0122 00:30:24.274205 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="7410ae33-4e2e-4147-9890-36c889741f39" containerName="collect-profiles" Jan 22 00:30:24 crc kubenswrapper[4829]: I0122 00:30:24.274218 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="391f97ab-a0ff-4185-bf21-e9563069d026" containerName="docker-build" Jan 22 00:30:24 crc kubenswrapper[4829]: I0122 00:30:24.274670 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-74c8dcd6b5-k6w8z" Jan 22 00:30:24 crc kubenswrapper[4829]: I0122 00:30:24.279945 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"smart-gateway-operator-dockercfg-222tv" Jan 22 00:30:24 crc kubenswrapper[4829]: I0122 00:30:24.287729 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-74c8dcd6b5-k6w8z"] Jan 22 00:30:24 crc kubenswrapper[4829]: I0122 00:30:24.303047 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wm6sp\" (UniqueName: \"kubernetes.io/projected/ea1e9cc4-4807-47c7-926f-2e9ff44b6f22-kube-api-access-wm6sp\") pod \"smart-gateway-operator-74c8dcd6b5-k6w8z\" (UID: \"ea1e9cc4-4807-47c7-926f-2e9ff44b6f22\") " pod="service-telemetry/smart-gateway-operator-74c8dcd6b5-k6w8z" Jan 22 00:30:24 crc kubenswrapper[4829]: I0122 00:30:24.303098 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/ea1e9cc4-4807-47c7-926f-2e9ff44b6f22-runner\") pod \"smart-gateway-operator-74c8dcd6b5-k6w8z\" (UID: \"ea1e9cc4-4807-47c7-926f-2e9ff44b6f22\") " pod="service-telemetry/smart-gateway-operator-74c8dcd6b5-k6w8z" Jan 22 00:30:24 crc kubenswrapper[4829]: I0122 00:30:24.404169 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wm6sp\" (UniqueName: \"kubernetes.io/projected/ea1e9cc4-4807-47c7-926f-2e9ff44b6f22-kube-api-access-wm6sp\") pod \"smart-gateway-operator-74c8dcd6b5-k6w8z\" (UID: \"ea1e9cc4-4807-47c7-926f-2e9ff44b6f22\") " pod="service-telemetry/smart-gateway-operator-74c8dcd6b5-k6w8z" Jan 22 00:30:24 crc kubenswrapper[4829]: I0122 00:30:24.404662 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/ea1e9cc4-4807-47c7-926f-2e9ff44b6f22-runner\") pod \"smart-gateway-operator-74c8dcd6b5-k6w8z\" (UID: \"ea1e9cc4-4807-47c7-926f-2e9ff44b6f22\") " pod="service-telemetry/smart-gateway-operator-74c8dcd6b5-k6w8z" Jan 22 00:30:24 crc kubenswrapper[4829]: I0122 00:30:24.405239 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/ea1e9cc4-4807-47c7-926f-2e9ff44b6f22-runner\") pod \"smart-gateway-operator-74c8dcd6b5-k6w8z\" (UID: \"ea1e9cc4-4807-47c7-926f-2e9ff44b6f22\") " pod="service-telemetry/smart-gateway-operator-74c8dcd6b5-k6w8z" Jan 22 00:30:24 crc kubenswrapper[4829]: I0122 00:30:24.437614 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wm6sp\" (UniqueName: \"kubernetes.io/projected/ea1e9cc4-4807-47c7-926f-2e9ff44b6f22-kube-api-access-wm6sp\") pod \"smart-gateway-operator-74c8dcd6b5-k6w8z\" (UID: \"ea1e9cc4-4807-47c7-926f-2e9ff44b6f22\") " 
pod="service-telemetry/smart-gateway-operator-74c8dcd6b5-k6w8z" Jan 22 00:30:24 crc kubenswrapper[4829]: I0122 00:30:24.605680 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-74c8dcd6b5-k6w8z" Jan 22 00:30:24 crc kubenswrapper[4829]: I0122 00:30:24.822203 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-74c8dcd6b5-k6w8z"] Jan 22 00:30:24 crc kubenswrapper[4829]: W0122 00:30:24.835351 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podea1e9cc4_4807_47c7_926f_2e9ff44b6f22.slice/crio-c0577f3544b2afcbbc195b6c8b7ecb502a036332933cb9d8084d8888e4a91a51 WatchSource:0}: Error finding container c0577f3544b2afcbbc195b6c8b7ecb502a036332933cb9d8084d8888e4a91a51: Status 404 returned error can't find the container with id c0577f3544b2afcbbc195b6c8b7ecb502a036332933cb9d8084d8888e4a91a51 Jan 22 00:30:24 crc kubenswrapper[4829]: I0122 00:30:24.841046 4829 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 00:30:25 crc kubenswrapper[4829]: I0122 00:30:25.427151 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-74c8dcd6b5-k6w8z" event={"ID":"ea1e9cc4-4807-47c7-926f-2e9ff44b6f22","Type":"ContainerStarted","Data":"c0577f3544b2afcbbc195b6c8b7ecb502a036332933cb9d8084d8888e4a91a51"} Jan 22 00:30:30 crc kubenswrapper[4829]: I0122 00:30:30.493880 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-79bb6b48f5-hsqj9"] Jan 22 00:30:30 crc kubenswrapper[4829]: I0122 00:30:30.495014 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-79bb6b48f5-hsqj9" Jan 22 00:30:30 crc kubenswrapper[4829]: I0122 00:30:30.502087 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"service-telemetry-operator-dockercfg-bhmnb" Jan 22 00:30:30 crc kubenswrapper[4829]: I0122 00:30:30.542449 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-79bb6b48f5-hsqj9"] Jan 22 00:30:30 crc kubenswrapper[4829]: I0122 00:30:30.592261 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/e34ba979-1b71-49d7-9dab-0aac0e390ff1-runner\") pod \"service-telemetry-operator-79bb6b48f5-hsqj9\" (UID: \"e34ba979-1b71-49d7-9dab-0aac0e390ff1\") " pod="service-telemetry/service-telemetry-operator-79bb6b48f5-hsqj9" Jan 22 00:30:30 crc kubenswrapper[4829]: I0122 00:30:30.592453 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpm9q\" (UniqueName: \"kubernetes.io/projected/e34ba979-1b71-49d7-9dab-0aac0e390ff1-kube-api-access-wpm9q\") pod \"service-telemetry-operator-79bb6b48f5-hsqj9\" (UID: \"e34ba979-1b71-49d7-9dab-0aac0e390ff1\") " pod="service-telemetry/service-telemetry-operator-79bb6b48f5-hsqj9" Jan 22 00:30:30 crc kubenswrapper[4829]: I0122 00:30:30.693848 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/e34ba979-1b71-49d7-9dab-0aac0e390ff1-runner\") pod \"service-telemetry-operator-79bb6b48f5-hsqj9\" (UID: \"e34ba979-1b71-49d7-9dab-0aac0e390ff1\") " pod="service-telemetry/service-telemetry-operator-79bb6b48f5-hsqj9" Jan 22 00:30:30 crc kubenswrapper[4829]: I0122 00:30:30.693943 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpm9q\" (UniqueName: \"kubernetes.io/projected/e34ba979-1b71-49d7-9dab-0aac0e390ff1-kube-api-access-wpm9q\") pod \"service-telemetry-operator-79bb6b48f5-hsqj9\" (UID: \"e34ba979-1b71-49d7-9dab-0aac0e390ff1\") " pod="service-telemetry/service-telemetry-operator-79bb6b48f5-hsqj9" Jan 22 00:30:30 crc kubenswrapper[4829]: I0122 00:30:30.694696 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/e34ba979-1b71-49d7-9dab-0aac0e390ff1-runner\") pod \"service-telemetry-operator-79bb6b48f5-hsqj9\" (UID: \"e34ba979-1b71-49d7-9dab-0aac0e390ff1\") " pod="service-telemetry/service-telemetry-operator-79bb6b48f5-hsqj9" Jan 22 00:30:30 crc kubenswrapper[4829]: I0122 00:30:30.712529 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpm9q\" (UniqueName: \"kubernetes.io/projected/e34ba979-1b71-49d7-9dab-0aac0e390ff1-kube-api-access-wpm9q\") pod \"service-telemetry-operator-79bb6b48f5-hsqj9\" (UID: \"e34ba979-1b71-49d7-9dab-0aac0e390ff1\") " pod="service-telemetry/service-telemetry-operator-79bb6b48f5-hsqj9" Jan 22 00:30:30 crc kubenswrapper[4829]: I0122 00:30:30.822367 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-79bb6b48f5-hsqj9" Jan 22 00:30:38 crc kubenswrapper[4829]: I0122 00:30:38.636998 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-79bb6b48f5-hsqj9"] Jan 22 00:30:41 crc kubenswrapper[4829]: E0122 00:30:41.366355 4829 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/infrawatch/smart-gateway-operator:stable-1.5" Jan 22 00:30:41 crc kubenswrapper[4829]: E0122 00:30:41.366843 4829 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/infrawatch/smart-gateway-operator:stable-1.5,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:WATCH_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.annotations['olm.targetNamespaces'],},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:OPERATOR_NAME,Value:smart-gateway-operator,ValueFrom:nil,},EnvVar{Name:ANSIBLE_GATHERING,Value:explicit,ValueFrom:nil,},EnvVar{Name:ANSIBLE_VERBOSITY_SMARTGATEWAY_SMARTGATEWAY_INFRA_WATCH,Value:4,ValueFrom:nil,},EnvVar{Name:ANSIBLE_DEBUG_LOGS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CORE_SMARTGATEWAY_IMAGE,Value:image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-core:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BRIDGE_SMARTGATEWAY_IMAGE,Value:image-registry.openshift-image-registry.svc:5000/service-telemetry/sg-bridge:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OAUTH_PROXY_IMAGE,Value:quay.io/openshift/origin-oauth-proxy:latest,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:smart-gateway-operator.v5.0.1769041820,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:runner,ReadOnly:false,MountPath:/tmp/ansible-operator/runner,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wm6sp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod smart-gateway-operator-74c8dcd6b5-k6w8z_service-telemetry(ea1e9cc4-4807-47c7-926f-2e9ff44b6f22): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 00:30:41 crc kubenswrapper[4829]: E0122 00:30:41.368647 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying 
config: context canceled\"" pod="service-telemetry/smart-gateway-operator-74c8dcd6b5-k6w8z" podUID="ea1e9cc4-4807-47c7-926f-2e9ff44b6f22" Jan 22 00:30:41 crc kubenswrapper[4829]: I0122 00:30:41.554309 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-79bb6b48f5-hsqj9" event={"ID":"e34ba979-1b71-49d7-9dab-0aac0e390ff1","Type":"ContainerStarted","Data":"e6a4d4e6bf2daeff66fbcffb2cf4a22fa0ff4478152ae12e13604838120cc158"} Jan 22 00:30:41 crc kubenswrapper[4829]: E0122 00:30:41.555835 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/infrawatch/smart-gateway-operator:stable-1.5\\\"\"" pod="service-telemetry/smart-gateway-operator-74c8dcd6b5-k6w8z" podUID="ea1e9cc4-4807-47c7-926f-2e9ff44b6f22" Jan 22 00:30:46 crc kubenswrapper[4829]: I0122 00:30:46.599722 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-79bb6b48f5-hsqj9" event={"ID":"e34ba979-1b71-49d7-9dab-0aac0e390ff1","Type":"ContainerStarted","Data":"d14fcaf7e486408340a796ab0fc682dd4f16c637f226d13d0b33c006eaf76d7a"} Jan 22 00:30:46 crc kubenswrapper[4829]: I0122 00:30:46.631247 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-operator-79bb6b48f5-hsqj9" podStartSLOduration=11.548942729 podStartE2EDuration="16.631214066s" podCreationTimestamp="2026-01-22 00:30:30 +0000 UTC" firstStartedPulling="2026-01-22 00:30:40.649675201 +0000 UTC m=+1418.685917123" lastFinishedPulling="2026-01-22 00:30:45.731946528 +0000 UTC m=+1423.768188460" observedRunningTime="2026-01-22 00:30:46.628705679 +0000 UTC m=+1424.664947661" watchObservedRunningTime="2026-01-22 00:30:46.631214066 +0000 UTC m=+1424.667456018" Jan 22 00:30:57 crc kubenswrapper[4829]: I0122 00:30:57.679651 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-74c8dcd6b5-k6w8z" event={"ID":"ea1e9cc4-4807-47c7-926f-2e9ff44b6f22","Type":"ContainerStarted","Data":"f51ced5c2ec1ed8066451189361fdf4f839c5e2ced38afb7b6019b02236e8891"} Jan 22 00:30:57 crc kubenswrapper[4829]: I0122 00:30:57.707928 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/smart-gateway-operator-74c8dcd6b5-k6w8z" podStartSLOduration=1.751419533 podStartE2EDuration="33.70790722s" podCreationTimestamp="2026-01-22 00:30:24 +0000 UTC" firstStartedPulling="2026-01-22 00:30:24.840767864 +0000 UTC m=+1402.877009786" lastFinishedPulling="2026-01-22 00:30:56.797255561 +0000 UTC m=+1434.833497473" observedRunningTime="2026-01-22 00:30:57.701593655 +0000 UTC m=+1435.737835587" watchObservedRunningTime="2026-01-22 00:30:57.70790722 +0000 UTC m=+1435.744149142" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.528181 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-5mfjb"] Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.529711 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.534247 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-interconnect-sasl-config" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.534265 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-users" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.534826 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-inter-router-credentials" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.534967 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-openstack-credentials" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.536378 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-inter-router-ca" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.536752 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-openstack-ca" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.536985 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-dockercfg-nhmmb" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.603913 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-5mfjb"] Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.656429 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-5mfjb\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.656745 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/3a466515-d1ce-4572-a159-a8be2d89a124-sasl-config\") pod \"default-interconnect-68864d46cb-5mfjb\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.656841 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-5mfjb\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.656937 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-5mfjb\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.657023 4829 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-5mfjb\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.657116 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-sasl-users\") pod \"default-interconnect-68864d46cb-5mfjb\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.657208 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2t4xf\" (UniqueName: \"kubernetes.io/projected/3a466515-d1ce-4572-a159-a8be2d89a124-kube-api-access-2t4xf\") pod \"default-interconnect-68864d46cb-5mfjb\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.758017 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-5mfjb\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.758327 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-5mfjb\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.758384 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-sasl-users\") pod \"default-interconnect-68864d46cb-5mfjb\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.758428 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2t4xf\" (UniqueName: \"kubernetes.io/projected/3a466515-d1ce-4572-a159-a8be2d89a124-kube-api-access-2t4xf\") pod \"default-interconnect-68864d46cb-5mfjb\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.758461 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-5mfjb\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.758528 4829 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/3a466515-d1ce-4572-a159-a8be2d89a124-sasl-config\") pod \"default-interconnect-68864d46cb-5mfjb\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.758583 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-5mfjb\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.760713 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/3a466515-d1ce-4572-a159-a8be2d89a124-sasl-config\") pod \"default-interconnect-68864d46cb-5mfjb\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.764799 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-5mfjb\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.764860 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-5mfjb\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.766079 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-5mfjb\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.769470 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-sasl-users\") pod \"default-interconnect-68864d46cb-5mfjb\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.771692 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-5mfjb\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.778908 4829 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-2t4xf\" (UniqueName: \"kubernetes.io/projected/3a466515-d1ce-4572-a159-a8be2d89a124-kube-api-access-2t4xf\") pod \"default-interconnect-68864d46cb-5mfjb\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" Jan 22 00:31:08 crc kubenswrapper[4829]: I0122 00:31:08.858682 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" Jan 22 00:31:09 crc kubenswrapper[4829]: I0122 00:31:09.064561 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-5mfjb"] Jan 22 00:31:09 crc kubenswrapper[4829]: I0122 00:31:09.766285 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" event={"ID":"3a466515-d1ce-4572-a159-a8be2d89a124","Type":"ContainerStarted","Data":"ed61594a11b1254006d0544646291aa8efd04a388cbc00ad96b6f39d7436e5d0"} Jan 22 00:31:15 crc kubenswrapper[4829]: I0122 00:31:15.818912 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" event={"ID":"3a466515-d1ce-4572-a159-a8be2d89a124","Type":"ContainerStarted","Data":"ee43842d6374876ab34f71d2e335d557db442e6738431128f9b5adfc90dbbca6"} Jan 22 00:31:15 crc kubenswrapper[4829]: I0122 00:31:15.840328 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" podStartSLOduration=2.1827711 podStartE2EDuration="7.840310339s" podCreationTimestamp="2026-01-22 00:31:08 +0000 UTC" firstStartedPulling="2026-01-22 00:31:09.076397712 +0000 UTC m=+1447.112639634" lastFinishedPulling="2026-01-22 00:31:14.733936961 +0000 UTC m=+1452.770178873" observedRunningTime="2026-01-22 00:31:15.837452511 +0000 UTC m=+1453.873694423" watchObservedRunningTime="2026-01-22 00:31:15.840310339 +0000 UTC m=+1453.876552251" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.768322 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/prometheus-default-0"] Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.770002 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.774367 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-default-rulefiles-2" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.774372 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"serving-certs-ca-bundle" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.774734 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-default" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.778275 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-default-rulefiles-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.778581 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-default-rulefiles-1" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.778803 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-default-web-config" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.779006 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-default-tls-assets-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.779178 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-prometheus-proxy-tls" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.779350 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-session-secret" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.779669 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-stf-dockercfg-jdn68" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.847254 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-config-out\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.847319 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-configmap-serving-certs-ca-bundle\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.847388 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-8715d40a-e340-4685-87d7-420c45b2f4b1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8715d40a-e340-4685-87d7-420c45b2f4b1\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.847415 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-default-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-prometheus-default-rulefiles-2\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " 
pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.847439 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-tls-assets\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.847464 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65b5r\" (UniqueName: \"kubernetes.io/projected/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-kube-api-access-65b5r\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.847495 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.847522 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-web-config\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.847568 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-default-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-prometheus-default-rulefiles-0\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.847596 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-config\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.847628 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-secret-default-session-secret\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.847654 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-default-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-prometheus-default-rulefiles-1\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.884451 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-default-0"] Jan 22 00:31:19 crc 
kubenswrapper[4829]: I0122 00:31:19.948291 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-config-out\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.948358 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-configmap-serving-certs-ca-bundle\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.948409 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-8715d40a-e340-4685-87d7-420c45b2f4b1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8715d40a-e340-4685-87d7-420c45b2f4b1\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.948430 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-default-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-prometheus-default-rulefiles-2\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.948445 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-tls-assets\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.948463 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65b5r\" (UniqueName: \"kubernetes.io/projected/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-kube-api-access-65b5r\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.948486 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.948503 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-web-config\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.948527 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-default-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-prometheus-default-rulefiles-0\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " 
pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.948568 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-config\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.948596 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-secret-default-session-secret\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.948618 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-default-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-prometheus-default-rulefiles-1\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.949236 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-default-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-prometheus-default-rulefiles-1\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: E0122 00:31:19.950244 4829 secret.go:188] Couldn't get secret service-telemetry/default-prometheus-proxy-tls: secret "default-prometheus-proxy-tls" not found Jan 22 00:31:19 crc kubenswrapper[4829]: E0122 00:31:19.950452 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-secret-default-prometheus-proxy-tls podName:9f9e74a3-14d1-45b0-9c9f-1fa93676002a nodeName:}" failed. No retries permitted until 2026-01-22 00:31:20.450423028 +0000 UTC m=+1458.486664960 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "secret-default-prometheus-proxy-tls" (UniqueName: "kubernetes.io/secret/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-secret-default-prometheus-proxy-tls") pod "prometheus-default-0" (UID: "9f9e74a3-14d1-45b0-9c9f-1fa93676002a") : secret "default-prometheus-proxy-tls" not found Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.950614 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-default-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-prometheus-default-rulefiles-0\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.950830 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-default-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-prometheus-default-rulefiles-2\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.951019 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-configmap-serving-certs-ca-bundle\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.954463 4829 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.954800 4829 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-8715d40a-e340-4685-87d7-420c45b2f4b1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8715d40a-e340-4685-87d7-420c45b2f4b1\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/5dbdd6163d6bd8f5e2f02b13d995b6255450e1066dac8bc6a6f9d1f9aa32f7d0/globalmount\"" pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.957191 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-web-config\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.959010 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-tls-assets\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.959796 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-config-out\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.963719 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config\" (UniqueName: \"kubernetes.io/secret/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-config\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.966687 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65b5r\" (UniqueName: \"kubernetes.io/projected/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-kube-api-access-65b5r\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.972218 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-secret-default-session-secret\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:19 crc kubenswrapper[4829]: I0122 00:31:19.979822 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-8715d40a-e340-4685-87d7-420c45b2f4b1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8715d40a-e340-4685-87d7-420c45b2f4b1\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:20 crc kubenswrapper[4829]: I0122 00:31:20.455849 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:20 crc kubenswrapper[4829]: E0122 00:31:20.456003 4829 secret.go:188] Couldn't get secret service-telemetry/default-prometheus-proxy-tls: secret "default-prometheus-proxy-tls" not found Jan 22 00:31:20 crc kubenswrapper[4829]: E0122 00:31:20.456067 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-secret-default-prometheus-proxy-tls podName:9f9e74a3-14d1-45b0-9c9f-1fa93676002a nodeName:}" failed. No retries permitted until 2026-01-22 00:31:21.45605017 +0000 UTC m=+1459.492292082 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "secret-default-prometheus-proxy-tls" (UniqueName: "kubernetes.io/secret/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-secret-default-prometheus-proxy-tls") pod "prometheus-default-0" (UID: "9f9e74a3-14d1-45b0-9c9f-1fa93676002a") : secret "default-prometheus-proxy-tls" not found Jan 22 00:31:21 crc kubenswrapper[4829]: I0122 00:31:21.470499 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:21 crc kubenswrapper[4829]: I0122 00:31:21.476912 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/9f9e74a3-14d1-45b0-9c9f-1fa93676002a-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"9f9e74a3-14d1-45b0-9c9f-1fa93676002a\") " pod="service-telemetry/prometheus-default-0" Jan 22 00:31:21 crc kubenswrapper[4829]: I0122 00:31:21.604865 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/prometheus-default-0" Jan 22 00:31:22 crc kubenswrapper[4829]: I0122 00:31:22.048124 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-default-0"] Jan 22 00:31:22 crc kubenswrapper[4829]: I0122 00:31:22.863452 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"9f9e74a3-14d1-45b0-9c9f-1fa93676002a","Type":"ContainerStarted","Data":"454ec0ef269632ec89bef53adc3e93d9a60cce7f225bc884340d11b54714bd91"} Jan 22 00:31:26 crc kubenswrapper[4829]: I0122 00:31:26.896382 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"9f9e74a3-14d1-45b0-9c9f-1fa93676002a","Type":"ContainerStarted","Data":"7e6b203220254215967c368b8e0f7965f6c83aa92b12ad2f72a67a7f7115f049"} Jan 22 00:31:29 crc kubenswrapper[4829]: I0122 00:31:29.564717 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-snmp-webhook-6856cfb745-qxqkv"] Jan 22 00:31:29 crc kubenswrapper[4829]: I0122 00:31:29.566264 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-snmp-webhook-6856cfb745-qxqkv" Jan 22 00:31:29 crc kubenswrapper[4829]: I0122 00:31:29.573441 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-snmp-webhook-6856cfb745-qxqkv"] Jan 22 00:31:29 crc kubenswrapper[4829]: I0122 00:31:29.679818 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbmv5\" (UniqueName: \"kubernetes.io/projected/956fb702-682a-4908-9ce8-4572084b0d4e-kube-api-access-rbmv5\") pod \"default-snmp-webhook-6856cfb745-qxqkv\" (UID: \"956fb702-682a-4908-9ce8-4572084b0d4e\") " pod="service-telemetry/default-snmp-webhook-6856cfb745-qxqkv" Jan 22 00:31:29 crc kubenswrapper[4829]: I0122 00:31:29.781126 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbmv5\" (UniqueName: \"kubernetes.io/projected/956fb702-682a-4908-9ce8-4572084b0d4e-kube-api-access-rbmv5\") pod \"default-snmp-webhook-6856cfb745-qxqkv\" (UID: \"956fb702-682a-4908-9ce8-4572084b0d4e\") " pod="service-telemetry/default-snmp-webhook-6856cfb745-qxqkv" Jan 22 00:31:29 crc kubenswrapper[4829]: I0122 00:31:29.806121 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbmv5\" (UniqueName: \"kubernetes.io/projected/956fb702-682a-4908-9ce8-4572084b0d4e-kube-api-access-rbmv5\") pod \"default-snmp-webhook-6856cfb745-qxqkv\" (UID: \"956fb702-682a-4908-9ce8-4572084b0d4e\") " pod="service-telemetry/default-snmp-webhook-6856cfb745-qxqkv" Jan 22 00:31:29 crc kubenswrapper[4829]: I0122 00:31:29.886330 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-snmp-webhook-6856cfb745-qxqkv" Jan 22 00:31:30 crc kubenswrapper[4829]: I0122 00:31:30.125768 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-snmp-webhook-6856cfb745-qxqkv"] Jan 22 00:31:30 crc kubenswrapper[4829]: I0122 00:31:30.922952 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-snmp-webhook-6856cfb745-qxqkv" event={"ID":"956fb702-682a-4908-9ce8-4572084b0d4e","Type":"ContainerStarted","Data":"1f81d31847983ccba32bef0921829bcd6c26c8f0dd327592c757e2e148f7383e"} Jan 22 00:31:32 crc kubenswrapper[4829]: I0122 00:31:32.938205 4829 generic.go:334] "Generic (PLEG): container finished" podID="9f9e74a3-14d1-45b0-9c9f-1fa93676002a" containerID="7e6b203220254215967c368b8e0f7965f6c83aa92b12ad2f72a67a7f7115f049" exitCode=0 Jan 22 00:31:32 crc kubenswrapper[4829]: I0122 00:31:32.938277 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"9f9e74a3-14d1-45b0-9c9f-1fa93676002a","Type":"ContainerDied","Data":"7e6b203220254215967c368b8e0f7965f6c83aa92b12ad2f72a67a7f7115f049"} Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.185152 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/alertmanager-default-0"] Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.186622 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.188726 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-cluster-tls-config" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.189390 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-generated" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.189518 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-alertmanager-proxy-tls" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.189658 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-stf-dockercfg-9j4cv" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.190250 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-web-config" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.193180 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-tls-assets-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.211224 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/alertmanager-default-0"] Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.335728 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-4fdfd7e9-eb68-4791-a95e-df4d10b9e275\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4fdfd7e9-eb68-4791-a95e-df4d10b9e275\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.335800 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.335839 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-secret-default-session-secret\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.335855 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-tls-assets\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.335877 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-config-volume\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.335897 4829 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-cluster-tls-config\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.335918 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-web-config\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.335944 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xhr9\" (UniqueName: \"kubernetes.io/projected/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-kube-api-access-9xhr9\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.335958 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-config-out\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.437012 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-config-volume\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.437092 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-cluster-tls-config\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.437142 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-web-config\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.437192 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xhr9\" (UniqueName: \"kubernetes.io/projected/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-kube-api-access-9xhr9\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.437221 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-config-out\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.437256 4829 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-4fdfd7e9-eb68-4791-a95e-df4d10b9e275\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4fdfd7e9-eb68-4791-a95e-df4d10b9e275\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.437319 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.437361 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-secret-default-session-secret\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.437381 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-tls-assets\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: E0122 00:31:33.438774 4829 secret.go:188] Couldn't get secret service-telemetry/default-alertmanager-proxy-tls: secret "default-alertmanager-proxy-tls" not found Jan 22 00:31:33 crc kubenswrapper[4829]: E0122 00:31:33.438867 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-secret-default-alertmanager-proxy-tls podName:cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6 nodeName:}" failed. No retries permitted until 2026-01-22 00:31:33.938840513 +0000 UTC m=+1471.975082485 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "secret-default-alertmanager-proxy-tls" (UniqueName: "kubernetes.io/secret/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-secret-default-alertmanager-proxy-tls") pod "alertmanager-default-0" (UID: "cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6") : secret "default-alertmanager-proxy-tls" not found Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.444582 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-config-out\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.445108 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-tls-assets\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.449513 4829 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.449625 4829 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-4fdfd7e9-eb68-4791-a95e-df4d10b9e275\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4fdfd7e9-eb68-4791-a95e-df4d10b9e275\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/22b98c08ea1a3183be4426899e6d739b5880382cd2ba626519dd741804beecdb/globalmount\"" pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.452672 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-web-config\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.454740 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-secret-default-session-secret\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.460144 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-config-volume\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.464399 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xhr9\" (UniqueName: \"kubernetes.io/projected/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-kube-api-access-9xhr9\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.468129 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-cluster-tls-config\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.486024 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-4fdfd7e9-eb68-4791-a95e-df4d10b9e275\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-4fdfd7e9-eb68-4791-a95e-df4d10b9e275\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: I0122 00:31:33.944222 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:33 crc kubenswrapper[4829]: E0122 00:31:33.944391 4829 secret.go:188] Couldn't get secret service-telemetry/default-alertmanager-proxy-tls: secret "default-alertmanager-proxy-tls" 
not found Jan 22 00:31:33 crc kubenswrapper[4829]: E0122 00:31:33.944450 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-secret-default-alertmanager-proxy-tls podName:cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6 nodeName:}" failed. No retries permitted until 2026-01-22 00:31:34.944434614 +0000 UTC m=+1472.980676526 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "secret-default-alertmanager-proxy-tls" (UniqueName: "kubernetes.io/secret/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-secret-default-alertmanager-proxy-tls") pod "alertmanager-default-0" (UID: "cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6") : secret "default-alertmanager-proxy-tls" not found Jan 22 00:31:34 crc kubenswrapper[4829]: I0122 00:31:34.959605 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:34 crc kubenswrapper[4829]: E0122 00:31:34.959822 4829 secret.go:188] Couldn't get secret service-telemetry/default-alertmanager-proxy-tls: secret "default-alertmanager-proxy-tls" not found Jan 22 00:31:34 crc kubenswrapper[4829]: E0122 00:31:34.959899 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-secret-default-alertmanager-proxy-tls podName:cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6 nodeName:}" failed. No retries permitted until 2026-01-22 00:31:36.959881887 +0000 UTC m=+1474.996123799 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "secret-default-alertmanager-proxy-tls" (UniqueName: "kubernetes.io/secret/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-secret-default-alertmanager-proxy-tls") pod "alertmanager-default-0" (UID: "cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6") : secret "default-alertmanager-proxy-tls" not found Jan 22 00:31:36 crc kubenswrapper[4829]: I0122 00:31:36.987491 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:36 crc kubenswrapper[4829]: I0122 00:31:36.993177 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6\") " pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:37 crc kubenswrapper[4829]: I0122 00:31:37.108164 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/alertmanager-default-0" Jan 22 00:31:38 crc kubenswrapper[4829]: I0122 00:31:38.630686 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/alertmanager-default-0"] Jan 22 00:31:38 crc kubenswrapper[4829]: I0122 00:31:38.984093 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-snmp-webhook-6856cfb745-qxqkv" event={"ID":"956fb702-682a-4908-9ce8-4572084b0d4e","Type":"ContainerStarted","Data":"8cb3a931d7df99c895db43dda9ad5587432a06bcf36a3e97590f85c774e0d67f"} Jan 22 00:31:39 crc kubenswrapper[4829]: I0122 00:31:39.011316 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-snmp-webhook-6856cfb745-qxqkv" podStartSLOduration=1.8491802910000001 podStartE2EDuration="10.011295659s" podCreationTimestamp="2026-01-22 00:31:29 +0000 UTC" firstStartedPulling="2026-01-22 00:31:30.131383261 +0000 UTC m=+1468.167625183" lastFinishedPulling="2026-01-22 00:31:38.293498639 +0000 UTC m=+1476.329740551" observedRunningTime="2026-01-22 00:31:39.000573958 +0000 UTC m=+1477.036815880" watchObservedRunningTime="2026-01-22 00:31:39.011295659 +0000 UTC m=+1477.047537571" Jan 22 00:31:39 crc kubenswrapper[4829]: I0122 00:31:39.992801 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6","Type":"ContainerStarted","Data":"e2a16a52be072fffe0a84ad20f27186c57057a4748073e67f9644e3d543814f2"} Jan 22 00:31:43 crc kubenswrapper[4829]: I0122 00:31:43.015974 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6","Type":"ContainerStarted","Data":"197244f9f16452798879cdf70f73e3a4bf0343dd48f587c96fd750d66fec4ba5"} Jan 22 00:31:45 crc kubenswrapper[4829]: I0122 00:31:45.029829 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"9f9e74a3-14d1-45b0-9c9f-1fa93676002a","Type":"ContainerStarted","Data":"b1e2ba416ff4a5541520e21e1bdbcc91364559c80e73e9fafc63f64b1f29124a"} Jan 22 00:31:47 crc kubenswrapper[4829]: I0122 00:31:47.042411 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"9f9e74a3-14d1-45b0-9c9f-1fa93676002a","Type":"ContainerStarted","Data":"3cc87b482564544fc75ae272556218c18cd39f24589cd56a0f1909a2c8f038a2"} Jan 22 00:31:47 crc kubenswrapper[4829]: I0122 00:31:47.943857 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6"] Jan 22 00:31:47 crc kubenswrapper[4829]: I0122 00:31:47.945218 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" Jan 22 00:31:47 crc kubenswrapper[4829]: I0122 00:31:47.947756 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-cloud1-coll-meter-proxy-tls" Jan 22 00:31:47 crc kubenswrapper[4829]: I0122 00:31:47.947873 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"smart-gateway-dockercfg-qcgcq" Jan 22 00:31:47 crc kubenswrapper[4829]: I0122 00:31:47.948095 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-coll-meter-sg-core-configmap" Jan 22 00:31:47 crc kubenswrapper[4829]: I0122 00:31:47.949672 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"smart-gateway-session-secret" Jan 22 00:31:47 crc kubenswrapper[4829]: I0122 00:31:47.954684 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6"] Jan 22 00:31:48 crc kubenswrapper[4829]: I0122 00:31:48.057711 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/ed4e3e4a-9af6-470a-9f31-9bb2f31e166d-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6\" (UID: \"ed4e3e4a-9af6-470a-9f31-9bb2f31e166d\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" Jan 22 00:31:48 crc kubenswrapper[4829]: I0122 00:31:48.057754 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/ed4e3e4a-9af6-470a-9f31-9bb2f31e166d-session-secret\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6\" (UID: \"ed4e3e4a-9af6-470a-9f31-9bb2f31e166d\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" Jan 22 00:31:48 crc kubenswrapper[4829]: I0122 00:31:48.057791 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/ed4e3e4a-9af6-470a-9f31-9bb2f31e166d-socket-dir\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6\" (UID: \"ed4e3e4a-9af6-470a-9f31-9bb2f31e166d\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" Jan 22 00:31:48 crc kubenswrapper[4829]: I0122 00:31:48.058038 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rs5fc\" (UniqueName: \"kubernetes.io/projected/ed4e3e4a-9af6-470a-9f31-9bb2f31e166d-kube-api-access-rs5fc\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6\" (UID: \"ed4e3e4a-9af6-470a-9f31-9bb2f31e166d\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" Jan 22 00:31:48 crc kubenswrapper[4829]: I0122 00:31:48.058087 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/ed4e3e4a-9af6-470a-9f31-9bb2f31e166d-sg-core-config\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6\" (UID: \"ed4e3e4a-9af6-470a-9f31-9bb2f31e166d\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" Jan 22 00:31:48 crc kubenswrapper[4829]: I0122 00:31:48.159157 4829 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-rs5fc\" (UniqueName: \"kubernetes.io/projected/ed4e3e4a-9af6-470a-9f31-9bb2f31e166d-kube-api-access-rs5fc\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6\" (UID: \"ed4e3e4a-9af6-470a-9f31-9bb2f31e166d\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" Jan 22 00:31:48 crc kubenswrapper[4829]: I0122 00:31:48.159204 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/ed4e3e4a-9af6-470a-9f31-9bb2f31e166d-sg-core-config\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6\" (UID: \"ed4e3e4a-9af6-470a-9f31-9bb2f31e166d\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" Jan 22 00:31:48 crc kubenswrapper[4829]: I0122 00:31:48.159236 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/ed4e3e4a-9af6-470a-9f31-9bb2f31e166d-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6\" (UID: \"ed4e3e4a-9af6-470a-9f31-9bb2f31e166d\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" Jan 22 00:31:48 crc kubenswrapper[4829]: I0122 00:31:48.159259 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/ed4e3e4a-9af6-470a-9f31-9bb2f31e166d-session-secret\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6\" (UID: \"ed4e3e4a-9af6-470a-9f31-9bb2f31e166d\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" Jan 22 00:31:48 crc kubenswrapper[4829]: I0122 00:31:48.159296 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/ed4e3e4a-9af6-470a-9f31-9bb2f31e166d-socket-dir\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6\" (UID: \"ed4e3e4a-9af6-470a-9f31-9bb2f31e166d\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" Jan 22 00:31:48 crc kubenswrapper[4829]: E0122 00:31:48.159703 4829 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-coll-meter-proxy-tls: secret "default-cloud1-coll-meter-proxy-tls" not found Jan 22 00:31:48 crc kubenswrapper[4829]: E0122 00:31:48.159762 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ed4e3e4a-9af6-470a-9f31-9bb2f31e166d-default-cloud1-coll-meter-proxy-tls podName:ed4e3e4a-9af6-470a-9f31-9bb2f31e166d nodeName:}" failed. No retries permitted until 2026-01-22 00:31:48.65974708 +0000 UTC m=+1486.695988992 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "default-cloud1-coll-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/ed4e3e4a-9af6-470a-9f31-9bb2f31e166d-default-cloud1-coll-meter-proxy-tls") pod "default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" (UID: "ed4e3e4a-9af6-470a-9f31-9bb2f31e166d") : secret "default-cloud1-coll-meter-proxy-tls" not found Jan 22 00:31:48 crc kubenswrapper[4829]: I0122 00:31:48.159829 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/ed4e3e4a-9af6-470a-9f31-9bb2f31e166d-socket-dir\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6\" (UID: \"ed4e3e4a-9af6-470a-9f31-9bb2f31e166d\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" Jan 22 00:31:48 crc kubenswrapper[4829]: I0122 00:31:48.160698 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/ed4e3e4a-9af6-470a-9f31-9bb2f31e166d-sg-core-config\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6\" (UID: \"ed4e3e4a-9af6-470a-9f31-9bb2f31e166d\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" Jan 22 00:31:48 crc kubenswrapper[4829]: I0122 00:31:48.165700 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/ed4e3e4a-9af6-470a-9f31-9bb2f31e166d-session-secret\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6\" (UID: \"ed4e3e4a-9af6-470a-9f31-9bb2f31e166d\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" Jan 22 00:31:48 crc kubenswrapper[4829]: I0122 00:31:48.183127 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rs5fc\" (UniqueName: \"kubernetes.io/projected/ed4e3e4a-9af6-470a-9f31-9bb2f31e166d-kube-api-access-rs5fc\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6\" (UID: \"ed4e3e4a-9af6-470a-9f31-9bb2f31e166d\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" Jan 22 00:31:48 crc kubenswrapper[4829]: I0122 00:31:48.666307 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/ed4e3e4a-9af6-470a-9f31-9bb2f31e166d-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6\" (UID: \"ed4e3e4a-9af6-470a-9f31-9bb2f31e166d\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" Jan 22 00:31:48 crc kubenswrapper[4829]: E0122 00:31:48.666449 4829 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-coll-meter-proxy-tls: secret "default-cloud1-coll-meter-proxy-tls" not found Jan 22 00:31:48 crc kubenswrapper[4829]: E0122 00:31:48.666498 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ed4e3e4a-9af6-470a-9f31-9bb2f31e166d-default-cloud1-coll-meter-proxy-tls podName:ed4e3e4a-9af6-470a-9f31-9bb2f31e166d nodeName:}" failed. No retries permitted until 2026-01-22 00:31:49.666483776 +0000 UTC m=+1487.702725688 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "default-cloud1-coll-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/ed4e3e4a-9af6-470a-9f31-9bb2f31e166d-default-cloud1-coll-meter-proxy-tls") pod "default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" (UID: "ed4e3e4a-9af6-470a-9f31-9bb2f31e166d") : secret "default-cloud1-coll-meter-proxy-tls" not found Jan 22 00:31:49 crc kubenswrapper[4829]: I0122 00:31:49.054853 4829 generic.go:334] "Generic (PLEG): container finished" podID="cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6" containerID="197244f9f16452798879cdf70f73e3a4bf0343dd48f587c96fd750d66fec4ba5" exitCode=0 Jan 22 00:31:49 crc kubenswrapper[4829]: I0122 00:31:49.054897 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6","Type":"ContainerDied","Data":"197244f9f16452798879cdf70f73e3a4bf0343dd48f587c96fd750d66fec4ba5"} Jan 22 00:31:49 crc kubenswrapper[4829]: I0122 00:31:49.678958 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/ed4e3e4a-9af6-470a-9f31-9bb2f31e166d-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6\" (UID: \"ed4e3e4a-9af6-470a-9f31-9bb2f31e166d\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" Jan 22 00:31:49 crc kubenswrapper[4829]: I0122 00:31:49.683828 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/ed4e3e4a-9af6-470a-9f31-9bb2f31e166d-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6\" (UID: \"ed4e3e4a-9af6-470a-9f31-9bb2f31e166d\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" Jan 22 00:31:49 crc kubenswrapper[4829]: I0122 00:31:49.759070 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" Jan 22 00:31:49 crc kubenswrapper[4829]: I0122 00:31:49.855067 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45"] Jan 22 00:31:49 crc kubenswrapper[4829]: I0122 00:31:49.856464 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" Jan 22 00:31:49 crc kubenswrapper[4829]: I0122 00:31:49.858122 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-cloud1-ceil-meter-proxy-tls" Jan 22 00:31:49 crc kubenswrapper[4829]: I0122 00:31:49.859999 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-ceil-meter-sg-core-configmap" Jan 22 00:31:49 crc kubenswrapper[4829]: I0122 00:31:49.872719 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45"] Jan 22 00:31:49 crc kubenswrapper[4829]: I0122 00:31:49.881033 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/6bff65e7-d317-42b0-92fd-79aa3d4a6d7a-socket-dir\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45\" (UID: \"6bff65e7-d317-42b0-92fd-79aa3d4a6d7a\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" Jan 22 00:31:49 crc kubenswrapper[4829]: I0122 00:31:49.881098 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/6bff65e7-d317-42b0-92fd-79aa3d4a6d7a-session-secret\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45\" (UID: \"6bff65e7-d317-42b0-92fd-79aa3d4a6d7a\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" Jan 22 00:31:49 crc kubenswrapper[4829]: I0122 00:31:49.881123 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffgj6\" (UniqueName: \"kubernetes.io/projected/6bff65e7-d317-42b0-92fd-79aa3d4a6d7a-kube-api-access-ffgj6\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45\" (UID: \"6bff65e7-d317-42b0-92fd-79aa3d4a6d7a\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" Jan 22 00:31:49 crc kubenswrapper[4829]: I0122 00:31:49.881189 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/6bff65e7-d317-42b0-92fd-79aa3d4a6d7a-sg-core-config\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45\" (UID: \"6bff65e7-d317-42b0-92fd-79aa3d4a6d7a\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" Jan 22 00:31:49 crc kubenswrapper[4829]: I0122 00:31:49.881250 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/6bff65e7-d317-42b0-92fd-79aa3d4a6d7a-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45\" (UID: \"6bff65e7-d317-42b0-92fd-79aa3d4a6d7a\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" Jan 22 00:31:49 crc kubenswrapper[4829]: I0122 00:31:49.982214 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/6bff65e7-d317-42b0-92fd-79aa3d4a6d7a-socket-dir\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45\" (UID: \"6bff65e7-d317-42b0-92fd-79aa3d4a6d7a\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" Jan 22 00:31:49 crc 
kubenswrapper[4829]: I0122 00:31:49.982271 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/6bff65e7-d317-42b0-92fd-79aa3d4a6d7a-session-secret\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45\" (UID: \"6bff65e7-d317-42b0-92fd-79aa3d4a6d7a\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" Jan 22 00:31:49 crc kubenswrapper[4829]: I0122 00:31:49.982293 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffgj6\" (UniqueName: \"kubernetes.io/projected/6bff65e7-d317-42b0-92fd-79aa3d4a6d7a-kube-api-access-ffgj6\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45\" (UID: \"6bff65e7-d317-42b0-92fd-79aa3d4a6d7a\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" Jan 22 00:31:49 crc kubenswrapper[4829]: I0122 00:31:49.982324 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/6bff65e7-d317-42b0-92fd-79aa3d4a6d7a-sg-core-config\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45\" (UID: \"6bff65e7-d317-42b0-92fd-79aa3d4a6d7a\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" Jan 22 00:31:49 crc kubenswrapper[4829]: I0122 00:31:49.982356 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/6bff65e7-d317-42b0-92fd-79aa3d4a6d7a-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45\" (UID: \"6bff65e7-d317-42b0-92fd-79aa3d4a6d7a\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" Jan 22 00:31:49 crc kubenswrapper[4829]: E0122 00:31:49.982519 4829 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-ceil-meter-proxy-tls: secret "default-cloud1-ceil-meter-proxy-tls" not found Jan 22 00:31:49 crc kubenswrapper[4829]: E0122 00:31:49.982609 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6bff65e7-d317-42b0-92fd-79aa3d4a6d7a-default-cloud1-ceil-meter-proxy-tls podName:6bff65e7-d317-42b0-92fd-79aa3d4a6d7a nodeName:}" failed. No retries permitted until 2026-01-22 00:31:50.482586047 +0000 UTC m=+1488.518827959 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "default-cloud1-ceil-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/6bff65e7-d317-42b0-92fd-79aa3d4a6d7a-default-cloud1-ceil-meter-proxy-tls") pod "default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" (UID: "6bff65e7-d317-42b0-92fd-79aa3d4a6d7a") : secret "default-cloud1-ceil-meter-proxy-tls" not found Jan 22 00:31:49 crc kubenswrapper[4829]: I0122 00:31:49.982751 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/6bff65e7-d317-42b0-92fd-79aa3d4a6d7a-socket-dir\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45\" (UID: \"6bff65e7-d317-42b0-92fd-79aa3d4a6d7a\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" Jan 22 00:31:49 crc kubenswrapper[4829]: I0122 00:31:49.983283 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/6bff65e7-d317-42b0-92fd-79aa3d4a6d7a-sg-core-config\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45\" (UID: \"6bff65e7-d317-42b0-92fd-79aa3d4a6d7a\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" Jan 22 00:31:49 crc kubenswrapper[4829]: I0122 00:31:49.987108 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/6bff65e7-d317-42b0-92fd-79aa3d4a6d7a-session-secret\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45\" (UID: \"6bff65e7-d317-42b0-92fd-79aa3d4a6d7a\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" Jan 22 00:31:50 crc kubenswrapper[4829]: I0122 00:31:50.000433 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffgj6\" (UniqueName: \"kubernetes.io/projected/6bff65e7-d317-42b0-92fd-79aa3d4a6d7a-kube-api-access-ffgj6\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45\" (UID: \"6bff65e7-d317-42b0-92fd-79aa3d4a6d7a\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" Jan 22 00:31:50 crc kubenswrapper[4829]: I0122 00:31:50.489399 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/6bff65e7-d317-42b0-92fd-79aa3d4a6d7a-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45\" (UID: \"6bff65e7-d317-42b0-92fd-79aa3d4a6d7a\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" Jan 22 00:31:50 crc kubenswrapper[4829]: E0122 00:31:50.489555 4829 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-ceil-meter-proxy-tls: secret "default-cloud1-ceil-meter-proxy-tls" not found Jan 22 00:31:50 crc kubenswrapper[4829]: E0122 00:31:50.489610 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6bff65e7-d317-42b0-92fd-79aa3d4a6d7a-default-cloud1-ceil-meter-proxy-tls podName:6bff65e7-d317-42b0-92fd-79aa3d4a6d7a nodeName:}" failed. No retries permitted until 2026-01-22 00:31:51.489594901 +0000 UTC m=+1489.525836813 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "default-cloud1-ceil-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/6bff65e7-d317-42b0-92fd-79aa3d4a6d7a-default-cloud1-ceil-meter-proxy-tls") pod "default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" (UID: "6bff65e7-d317-42b0-92fd-79aa3d4a6d7a") : secret "default-cloud1-ceil-meter-proxy-tls" not found Jan 22 00:31:51 crc kubenswrapper[4829]: I0122 00:31:51.503533 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/6bff65e7-d317-42b0-92fd-79aa3d4a6d7a-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45\" (UID: \"6bff65e7-d317-42b0-92fd-79aa3d4a6d7a\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" Jan 22 00:31:51 crc kubenswrapper[4829]: I0122 00:31:51.508426 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/6bff65e7-d317-42b0-92fd-79aa3d4a6d7a-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45\" (UID: \"6bff65e7-d317-42b0-92fd-79aa3d4a6d7a\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" Jan 22 00:31:51 crc kubenswrapper[4829]: I0122 00:31:51.673588 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" Jan 22 00:31:52 crc kubenswrapper[4829]: I0122 00:31:52.817365 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6"] Jan 22 00:31:52 crc kubenswrapper[4829]: I0122 00:31:52.824410 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45"] Jan 22 00:31:53 crc kubenswrapper[4829]: I0122 00:31:53.081728 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"9f9e74a3-14d1-45b0-9c9f-1fa93676002a","Type":"ContainerStarted","Data":"9a2ef9191eac42fe521752eb761fde42b1483dfbf4450e61a5cd916c6005097a"} Jan 22 00:31:53 crc kubenswrapper[4829]: I0122 00:31:53.117569 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/prometheus-default-0" podStartSLOduration=4.770651318 podStartE2EDuration="35.117549049s" podCreationTimestamp="2026-01-22 00:31:18 +0000 UTC" firstStartedPulling="2026-01-22 00:31:22.041818387 +0000 UTC m=+1460.078060299" lastFinishedPulling="2026-01-22 00:31:52.388716118 +0000 UTC m=+1490.424958030" observedRunningTime="2026-01-22 00:31:53.103734572 +0000 UTC m=+1491.139976484" watchObservedRunningTime="2026-01-22 00:31:53.117549049 +0000 UTC m=+1491.153790961" Jan 22 00:31:53 crc kubenswrapper[4829]: W0122 00:31:53.602256 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6bff65e7_d317_42b0_92fd_79aa3d4a6d7a.slice/crio-c940e41c0f13ec6c78c09c7fea4b6901037e822a989faa763173bd8116b4d3bc WatchSource:0}: Error finding container c940e41c0f13ec6c78c09c7fea4b6901037e822a989faa763173bd8116b4d3bc: Status 404 returned error can't find the container with id c940e41c0f13ec6c78c09c7fea4b6901037e822a989faa763173bd8116b4d3bc Jan 22 00:31:54 crc kubenswrapper[4829]: I0122 00:31:54.089346 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" event={"ID":"ed4e3e4a-9af6-470a-9f31-9bb2f31e166d","Type":"ContainerStarted","Data":"34d5b336edd59b9efcb0f0ed9d00dfe049e6142f34cd09691a71d661cbdf6967"} Jan 22 00:31:54 crc kubenswrapper[4829]: I0122 00:31:54.091108 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6","Type":"ContainerStarted","Data":"a7aba0f64384a6a72a0bf7a4ff6dca1fc4d47b31b6c1b27bb9e4f4946ac90b2a"} Jan 22 00:31:54 crc kubenswrapper[4829]: I0122 00:31:54.092904 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" event={"ID":"6bff65e7-d317-42b0-92fd-79aa3d4a6d7a","Type":"ContainerStarted","Data":"c940e41c0f13ec6c78c09c7fea4b6901037e822a989faa763173bd8116b4d3bc"} Jan 22 00:31:54 crc kubenswrapper[4829]: I0122 00:31:54.112500 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf"] Jan 22 00:31:54 crc kubenswrapper[4829]: I0122 00:31:54.113881 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" Jan 22 00:31:54 crc kubenswrapper[4829]: I0122 00:31:54.116349 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-cloud1-sens-meter-proxy-tls" Jan 22 00:31:54 crc kubenswrapper[4829]: I0122 00:31:54.117104 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-sens-meter-sg-core-configmap" Jan 22 00:31:54 crc kubenswrapper[4829]: I0122 00:31:54.129127 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf"] Jan 22 00:31:54 crc kubenswrapper[4829]: I0122 00:31:54.142880 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/e0451c51-c962-4cad-88e1-4e58c936f3c4-session-secret\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf\" (UID: \"e0451c51-c962-4cad-88e1-4e58c936f3c4\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" Jan 22 00:31:54 crc kubenswrapper[4829]: I0122 00:31:54.142956 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/e0451c51-c962-4cad-88e1-4e58c936f3c4-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf\" (UID: \"e0451c51-c962-4cad-88e1-4e58c936f3c4\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" Jan 22 00:31:54 crc kubenswrapper[4829]: I0122 00:31:54.142985 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kw72b\" (UniqueName: \"kubernetes.io/projected/e0451c51-c962-4cad-88e1-4e58c936f3c4-kube-api-access-kw72b\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf\" (UID: \"e0451c51-c962-4cad-88e1-4e58c936f3c4\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" Jan 22 00:31:54 crc kubenswrapper[4829]: I0122 00:31:54.143020 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: 
\"kubernetes.io/configmap/e0451c51-c962-4cad-88e1-4e58c936f3c4-sg-core-config\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf\" (UID: \"e0451c51-c962-4cad-88e1-4e58c936f3c4\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" Jan 22 00:31:54 crc kubenswrapper[4829]: I0122 00:31:54.143047 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/e0451c51-c962-4cad-88e1-4e58c936f3c4-socket-dir\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf\" (UID: \"e0451c51-c962-4cad-88e1-4e58c936f3c4\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" Jan 22 00:31:54 crc kubenswrapper[4829]: I0122 00:31:54.243992 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/e0451c51-c962-4cad-88e1-4e58c936f3c4-session-secret\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf\" (UID: \"e0451c51-c962-4cad-88e1-4e58c936f3c4\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" Jan 22 00:31:54 crc kubenswrapper[4829]: I0122 00:31:54.244049 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/e0451c51-c962-4cad-88e1-4e58c936f3c4-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf\" (UID: \"e0451c51-c962-4cad-88e1-4e58c936f3c4\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" Jan 22 00:31:54 crc kubenswrapper[4829]: I0122 00:31:54.244090 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kw72b\" (UniqueName: \"kubernetes.io/projected/e0451c51-c962-4cad-88e1-4e58c936f3c4-kube-api-access-kw72b\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf\" (UID: \"e0451c51-c962-4cad-88e1-4e58c936f3c4\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" Jan 22 00:31:54 crc kubenswrapper[4829]: I0122 00:31:54.244124 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/e0451c51-c962-4cad-88e1-4e58c936f3c4-sg-core-config\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf\" (UID: \"e0451c51-c962-4cad-88e1-4e58c936f3c4\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" Jan 22 00:31:54 crc kubenswrapper[4829]: I0122 00:31:54.244184 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/e0451c51-c962-4cad-88e1-4e58c936f3c4-socket-dir\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf\" (UID: \"e0451c51-c962-4cad-88e1-4e58c936f3c4\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" Jan 22 00:31:54 crc kubenswrapper[4829]: E0122 00:31:54.244259 4829 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-sens-meter-proxy-tls: secret "default-cloud1-sens-meter-proxy-tls" not found Jan 22 00:31:54 crc kubenswrapper[4829]: E0122 00:31:54.244349 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e0451c51-c962-4cad-88e1-4e58c936f3c4-default-cloud1-sens-meter-proxy-tls podName:e0451c51-c962-4cad-88e1-4e58c936f3c4 nodeName:}" failed. 
No retries permitted until 2026-01-22 00:31:54.744326959 +0000 UTC m=+1492.780568881 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "default-cloud1-sens-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/e0451c51-c962-4cad-88e1-4e58c936f3c4-default-cloud1-sens-meter-proxy-tls") pod "default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" (UID: "e0451c51-c962-4cad-88e1-4e58c936f3c4") : secret "default-cloud1-sens-meter-proxy-tls" not found Jan 22 00:31:54 crc kubenswrapper[4829]: I0122 00:31:54.247985 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/e0451c51-c962-4cad-88e1-4e58c936f3c4-socket-dir\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf\" (UID: \"e0451c51-c962-4cad-88e1-4e58c936f3c4\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" Jan 22 00:31:54 crc kubenswrapper[4829]: I0122 00:31:54.248313 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/e0451c51-c962-4cad-88e1-4e58c936f3c4-session-secret\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf\" (UID: \"e0451c51-c962-4cad-88e1-4e58c936f3c4\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" Jan 22 00:31:54 crc kubenswrapper[4829]: I0122 00:31:54.248364 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/e0451c51-c962-4cad-88e1-4e58c936f3c4-sg-core-config\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf\" (UID: \"e0451c51-c962-4cad-88e1-4e58c936f3c4\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" Jan 22 00:31:54 crc kubenswrapper[4829]: I0122 00:31:54.267336 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kw72b\" (UniqueName: \"kubernetes.io/projected/e0451c51-c962-4cad-88e1-4e58c936f3c4-kube-api-access-kw72b\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf\" (UID: \"e0451c51-c962-4cad-88e1-4e58c936f3c4\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" Jan 22 00:31:54 crc kubenswrapper[4829]: I0122 00:31:54.758011 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/e0451c51-c962-4cad-88e1-4e58c936f3c4-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf\" (UID: \"e0451c51-c962-4cad-88e1-4e58c936f3c4\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" Jan 22 00:31:54 crc kubenswrapper[4829]: E0122 00:31:54.758270 4829 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-sens-meter-proxy-tls: secret "default-cloud1-sens-meter-proxy-tls" not found Jan 22 00:31:54 crc kubenswrapper[4829]: E0122 00:31:54.758397 4829 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e0451c51-c962-4cad-88e1-4e58c936f3c4-default-cloud1-sens-meter-proxy-tls podName:e0451c51-c962-4cad-88e1-4e58c936f3c4 nodeName:}" failed. No retries permitted until 2026-01-22 00:31:55.758367881 +0000 UTC m=+1493.794609793 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "default-cloud1-sens-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/e0451c51-c962-4cad-88e1-4e58c936f3c4-default-cloud1-sens-meter-proxy-tls") pod "default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" (UID: "e0451c51-c962-4cad-88e1-4e58c936f3c4") : secret "default-cloud1-sens-meter-proxy-tls" not found Jan 22 00:31:55 crc kubenswrapper[4829]: I0122 00:31:55.105002 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" event={"ID":"ed4e3e4a-9af6-470a-9f31-9bb2f31e166d","Type":"ContainerStarted","Data":"d0d43129deff8d0dc1f1fe0e39030ef1c02fc44a4e2bebcc7798d3988c30b10a"} Jan 22 00:31:55 crc kubenswrapper[4829]: I0122 00:31:55.114019 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" event={"ID":"6bff65e7-d317-42b0-92fd-79aa3d4a6d7a","Type":"ContainerStarted","Data":"2be9cf612e36bebf6cc8cb77242d9230a5ffa8703525262a5464b6cf80726ffe"} Jan 22 00:31:55 crc kubenswrapper[4829]: I0122 00:31:55.773605 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/e0451c51-c962-4cad-88e1-4e58c936f3c4-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf\" (UID: \"e0451c51-c962-4cad-88e1-4e58c936f3c4\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" Jan 22 00:31:55 crc kubenswrapper[4829]: I0122 00:31:55.778920 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/e0451c51-c962-4cad-88e1-4e58c936f3c4-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf\" (UID: \"e0451c51-c962-4cad-88e1-4e58c936f3c4\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" Jan 22 00:31:55 crc kubenswrapper[4829]: I0122 00:31:55.935179 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" Jan 22 00:31:56 crc kubenswrapper[4829]: I0122 00:31:56.132122 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6","Type":"ContainerStarted","Data":"3e66cacfcb71f2410528daf58260fcc567849905f8170a6c4acaac674cc105f0"} Jan 22 00:31:56 crc kubenswrapper[4829]: I0122 00:31:56.407786 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf"] Jan 22 00:31:56 crc kubenswrapper[4829]: I0122 00:31:56.605122 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/prometheus-default-0" Jan 22 00:31:57 crc kubenswrapper[4829]: I0122 00:31:57.140566 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6","Type":"ContainerStarted","Data":"ac8b3f324009d66c0d261c617a40faaee3f014e1dc482811d9939915286467e7"} Jan 22 00:31:57 crc kubenswrapper[4829]: I0122 00:31:57.142331 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" event={"ID":"e0451c51-c962-4cad-88e1-4e58c936f3c4","Type":"ContainerStarted","Data":"eb19da44145eca41b4a5fb2d9a65ee8656f25829b9cf7f6f3a3728669f8080cb"} Jan 22 00:31:57 crc kubenswrapper[4829]: I0122 00:31:57.164956 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/alertmanager-default-0" podStartSLOduration=18.213177555 podStartE2EDuration="25.164942467s" podCreationTimestamp="2026-01-22 00:31:32 +0000 UTC" firstStartedPulling="2026-01-22 00:31:49.05650047 +0000 UTC m=+1487.092742372" lastFinishedPulling="2026-01-22 00:31:56.008265372 +0000 UTC m=+1494.044507284" observedRunningTime="2026-01-22 00:31:57.160126218 +0000 UTC m=+1495.196368130" watchObservedRunningTime="2026-01-22 00:31:57.164942467 +0000 UTC m=+1495.201184379" Jan 22 00:31:58 crc kubenswrapper[4829]: I0122 00:31:58.160627 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" event={"ID":"e0451c51-c962-4cad-88e1-4e58c936f3c4","Type":"ContainerStarted","Data":"c3a4f87be1bda1616ae93b675bc9e7d2e636532b2527f60dd67404cee59f1c69"} Jan 22 00:32:00 crc kubenswrapper[4829]: I0122 00:32:00.826293 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-coll-event-smartgateway-86889d79-n55rd"] Jan 22 00:32:00 crc kubenswrapper[4829]: I0122 00:32:00.841897 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-event-smartgateway-86889d79-n55rd"] Jan 22 00:32:00 crc kubenswrapper[4829]: I0122 00:32:00.842005 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-coll-event-smartgateway-86889d79-n55rd" Jan 22 00:32:00 crc kubenswrapper[4829]: I0122 00:32:00.844404 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-coll-event-sg-core-configmap" Jan 22 00:32:00 crc kubenswrapper[4829]: I0122 00:32:00.846072 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-cert" Jan 22 00:32:00 crc kubenswrapper[4829]: I0122 00:32:00.854098 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/975d0f01-de8e-402c-aa4b-f582673781e9-sg-core-config\") pod \"default-cloud1-coll-event-smartgateway-86889d79-n55rd\" (UID: \"975d0f01-de8e-402c-aa4b-f582673781e9\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-86889d79-n55rd" Jan 22 00:32:00 crc kubenswrapper[4829]: I0122 00:32:00.854142 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/975d0f01-de8e-402c-aa4b-f582673781e9-socket-dir\") pod \"default-cloud1-coll-event-smartgateway-86889d79-n55rd\" (UID: \"975d0f01-de8e-402c-aa4b-f582673781e9\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-86889d79-n55rd" Jan 22 00:32:00 crc kubenswrapper[4829]: I0122 00:32:00.854185 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/975d0f01-de8e-402c-aa4b-f582673781e9-elastic-certs\") pod \"default-cloud1-coll-event-smartgateway-86889d79-n55rd\" (UID: \"975d0f01-de8e-402c-aa4b-f582673781e9\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-86889d79-n55rd" Jan 22 00:32:00 crc kubenswrapper[4829]: I0122 00:32:00.854226 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dbc6\" (UniqueName: \"kubernetes.io/projected/975d0f01-de8e-402c-aa4b-f582673781e9-kube-api-access-2dbc6\") pod \"default-cloud1-coll-event-smartgateway-86889d79-n55rd\" (UID: \"975d0f01-de8e-402c-aa4b-f582673781e9\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-86889d79-n55rd" Jan 22 00:32:00 crc kubenswrapper[4829]: I0122 00:32:00.955894 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/975d0f01-de8e-402c-aa4b-f582673781e9-elastic-certs\") pod \"default-cloud1-coll-event-smartgateway-86889d79-n55rd\" (UID: \"975d0f01-de8e-402c-aa4b-f582673781e9\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-86889d79-n55rd" Jan 22 00:32:00 crc kubenswrapper[4829]: I0122 00:32:00.955952 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dbc6\" (UniqueName: \"kubernetes.io/projected/975d0f01-de8e-402c-aa4b-f582673781e9-kube-api-access-2dbc6\") pod \"default-cloud1-coll-event-smartgateway-86889d79-n55rd\" (UID: \"975d0f01-de8e-402c-aa4b-f582673781e9\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-86889d79-n55rd" Jan 22 00:32:00 crc kubenswrapper[4829]: I0122 00:32:00.956031 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/975d0f01-de8e-402c-aa4b-f582673781e9-sg-core-config\") pod 
\"default-cloud1-coll-event-smartgateway-86889d79-n55rd\" (UID: \"975d0f01-de8e-402c-aa4b-f582673781e9\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-86889d79-n55rd" Jan 22 00:32:00 crc kubenswrapper[4829]: I0122 00:32:00.956053 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/975d0f01-de8e-402c-aa4b-f582673781e9-socket-dir\") pod \"default-cloud1-coll-event-smartgateway-86889d79-n55rd\" (UID: \"975d0f01-de8e-402c-aa4b-f582673781e9\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-86889d79-n55rd" Jan 22 00:32:00 crc kubenswrapper[4829]: I0122 00:32:00.956556 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/975d0f01-de8e-402c-aa4b-f582673781e9-socket-dir\") pod \"default-cloud1-coll-event-smartgateway-86889d79-n55rd\" (UID: \"975d0f01-de8e-402c-aa4b-f582673781e9\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-86889d79-n55rd" Jan 22 00:32:00 crc kubenswrapper[4829]: I0122 00:32:00.958332 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/975d0f01-de8e-402c-aa4b-f582673781e9-sg-core-config\") pod \"default-cloud1-coll-event-smartgateway-86889d79-n55rd\" (UID: \"975d0f01-de8e-402c-aa4b-f582673781e9\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-86889d79-n55rd" Jan 22 00:32:00 crc kubenswrapper[4829]: I0122 00:32:00.962414 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/975d0f01-de8e-402c-aa4b-f582673781e9-elastic-certs\") pod \"default-cloud1-coll-event-smartgateway-86889d79-n55rd\" (UID: \"975d0f01-de8e-402c-aa4b-f582673781e9\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-86889d79-n55rd" Jan 22 00:32:00 crc kubenswrapper[4829]: I0122 00:32:00.984235 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dbc6\" (UniqueName: \"kubernetes.io/projected/975d0f01-de8e-402c-aa4b-f582673781e9-kube-api-access-2dbc6\") pod \"default-cloud1-coll-event-smartgateway-86889d79-n55rd\" (UID: \"975d0f01-de8e-402c-aa4b-f582673781e9\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-86889d79-n55rd" Jan 22 00:32:01 crc kubenswrapper[4829]: I0122 00:32:01.185985 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" event={"ID":"ed4e3e4a-9af6-470a-9f31-9bb2f31e166d","Type":"ContainerStarted","Data":"97f8a5359497737913a39c9385863152ae1ab0230bfe38eb741f1bd51dce2403"} Jan 22 00:32:01 crc kubenswrapper[4829]: I0122 00:32:01.200832 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-coll-event-smartgateway-86889d79-n55rd" Jan 22 00:32:01 crc kubenswrapper[4829]: I0122 00:32:01.664709 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-event-smartgateway-86889d79-n55rd"] Jan 22 00:32:01 crc kubenswrapper[4829]: W0122 00:32:01.671669 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod975d0f01_de8e_402c_aa4b_f582673781e9.slice/crio-cb3ebb8d6335d722eb9da304b490a9139d004a145232b038e0ae65eaaad07de6 WatchSource:0}: Error finding container cb3ebb8d6335d722eb9da304b490a9139d004a145232b038e0ae65eaaad07de6: Status 404 returned error can't find the container with id cb3ebb8d6335d722eb9da304b490a9139d004a145232b038e0ae65eaaad07de6 Jan 22 00:32:02 crc kubenswrapper[4829]: I0122 00:32:02.009254 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-ceil-event-smartgateway-f964b986c-5ln55"] Jan 22 00:32:02 crc kubenswrapper[4829]: I0122 00:32:02.010236 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f964b986c-5ln55" Jan 22 00:32:02 crc kubenswrapper[4829]: I0122 00:32:02.013027 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-ceil-event-sg-core-configmap" Jan 22 00:32:02 crc kubenswrapper[4829]: I0122 00:32:02.051763 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-event-smartgateway-f964b986c-5ln55"] Jan 22 00:32:02 crc kubenswrapper[4829]: I0122 00:32:02.080845 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/8adb7c3d-24d0-462c-a2bb-ff7533df28d6-elastic-certs\") pod \"default-cloud1-ceil-event-smartgateway-f964b986c-5ln55\" (UID: \"8adb7c3d-24d0-462c-a2bb-ff7533df28d6\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f964b986c-5ln55" Jan 22 00:32:02 crc kubenswrapper[4829]: I0122 00:32:02.081140 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/8adb7c3d-24d0-462c-a2bb-ff7533df28d6-sg-core-config\") pod \"default-cloud1-ceil-event-smartgateway-f964b986c-5ln55\" (UID: \"8adb7c3d-24d0-462c-a2bb-ff7533df28d6\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f964b986c-5ln55" Jan 22 00:32:02 crc kubenswrapper[4829]: I0122 00:32:02.081304 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9f6b\" (UniqueName: \"kubernetes.io/projected/8adb7c3d-24d0-462c-a2bb-ff7533df28d6-kube-api-access-m9f6b\") pod \"default-cloud1-ceil-event-smartgateway-f964b986c-5ln55\" (UID: \"8adb7c3d-24d0-462c-a2bb-ff7533df28d6\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f964b986c-5ln55" Jan 22 00:32:02 crc kubenswrapper[4829]: I0122 00:32:02.081413 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/8adb7c3d-24d0-462c-a2bb-ff7533df28d6-socket-dir\") pod \"default-cloud1-ceil-event-smartgateway-f964b986c-5ln55\" (UID: \"8adb7c3d-24d0-462c-a2bb-ff7533df28d6\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f964b986c-5ln55" Jan 22 00:32:02 crc 
kubenswrapper[4829]: I0122 00:32:02.182816 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/8adb7c3d-24d0-462c-a2bb-ff7533df28d6-socket-dir\") pod \"default-cloud1-ceil-event-smartgateway-f964b986c-5ln55\" (UID: \"8adb7c3d-24d0-462c-a2bb-ff7533df28d6\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f964b986c-5ln55" Jan 22 00:32:02 crc kubenswrapper[4829]: I0122 00:32:02.182880 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9f6b\" (UniqueName: \"kubernetes.io/projected/8adb7c3d-24d0-462c-a2bb-ff7533df28d6-kube-api-access-m9f6b\") pod \"default-cloud1-ceil-event-smartgateway-f964b986c-5ln55\" (UID: \"8adb7c3d-24d0-462c-a2bb-ff7533df28d6\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f964b986c-5ln55" Jan 22 00:32:02 crc kubenswrapper[4829]: I0122 00:32:02.182950 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/8adb7c3d-24d0-462c-a2bb-ff7533df28d6-elastic-certs\") pod \"default-cloud1-ceil-event-smartgateway-f964b986c-5ln55\" (UID: \"8adb7c3d-24d0-462c-a2bb-ff7533df28d6\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f964b986c-5ln55" Jan 22 00:32:02 crc kubenswrapper[4829]: I0122 00:32:02.183011 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/8adb7c3d-24d0-462c-a2bb-ff7533df28d6-sg-core-config\") pod \"default-cloud1-ceil-event-smartgateway-f964b986c-5ln55\" (UID: \"8adb7c3d-24d0-462c-a2bb-ff7533df28d6\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f964b986c-5ln55" Jan 22 00:32:02 crc kubenswrapper[4829]: I0122 00:32:02.183352 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/8adb7c3d-24d0-462c-a2bb-ff7533df28d6-socket-dir\") pod \"default-cloud1-ceil-event-smartgateway-f964b986c-5ln55\" (UID: \"8adb7c3d-24d0-462c-a2bb-ff7533df28d6\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f964b986c-5ln55" Jan 22 00:32:02 crc kubenswrapper[4829]: I0122 00:32:02.184273 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/8adb7c3d-24d0-462c-a2bb-ff7533df28d6-sg-core-config\") pod \"default-cloud1-ceil-event-smartgateway-f964b986c-5ln55\" (UID: \"8adb7c3d-24d0-462c-a2bb-ff7533df28d6\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f964b986c-5ln55" Jan 22 00:32:02 crc kubenswrapper[4829]: I0122 00:32:02.189288 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/8adb7c3d-24d0-462c-a2bb-ff7533df28d6-elastic-certs\") pod \"default-cloud1-ceil-event-smartgateway-f964b986c-5ln55\" (UID: \"8adb7c3d-24d0-462c-a2bb-ff7533df28d6\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f964b986c-5ln55" Jan 22 00:32:02 crc kubenswrapper[4829]: I0122 00:32:02.194938 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" event={"ID":"e0451c51-c962-4cad-88e1-4e58c936f3c4","Type":"ContainerStarted","Data":"6c04533c09868887c2cbd4ca21aebd564472f8bea13b1abff2b4d9b43bcfbad0"} Jan 22 00:32:02 crc kubenswrapper[4829]: I0122 00:32:02.195818 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="service-telemetry/default-cloud1-coll-event-smartgateway-86889d79-n55rd" event={"ID":"975d0f01-de8e-402c-aa4b-f582673781e9","Type":"ContainerStarted","Data":"44c3bfafa9dc429848f3ab6f78a2ddb6e925d409e738847cbebae68f0803378c"} Jan 22 00:32:02 crc kubenswrapper[4829]: I0122 00:32:02.195837 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-86889d79-n55rd" event={"ID":"975d0f01-de8e-402c-aa4b-f582673781e9","Type":"ContainerStarted","Data":"cb3ebb8d6335d722eb9da304b490a9139d004a145232b038e0ae65eaaad07de6"} Jan 22 00:32:02 crc kubenswrapper[4829]: I0122 00:32:02.198723 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" event={"ID":"6bff65e7-d317-42b0-92fd-79aa3d4a6d7a","Type":"ContainerStarted","Data":"a28bb73e65d606a4888fe1396d6d61a1125aed4a6d8f2247d00e9db71c56acb9"} Jan 22 00:32:02 crc kubenswrapper[4829]: I0122 00:32:02.199327 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9f6b\" (UniqueName: \"kubernetes.io/projected/8adb7c3d-24d0-462c-a2bb-ff7533df28d6-kube-api-access-m9f6b\") pod \"default-cloud1-ceil-event-smartgateway-f964b986c-5ln55\" (UID: \"8adb7c3d-24d0-462c-a2bb-ff7533df28d6\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f964b986c-5ln55" Jan 22 00:32:02 crc kubenswrapper[4829]: I0122 00:32:02.325874 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f964b986c-5ln55" Jan 22 00:32:02 crc kubenswrapper[4829]: I0122 00:32:02.811440 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-event-smartgateway-f964b986c-5ln55"] Jan 22 00:32:02 crc kubenswrapper[4829]: W0122 00:32:02.831425 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8adb7c3d_24d0_462c_a2bb_ff7533df28d6.slice/crio-8eb45138d8e144e9c29a0a45444c9677be82737e847610d8031db75d0b044b22 WatchSource:0}: Error finding container 8eb45138d8e144e9c29a0a45444c9677be82737e847610d8031db75d0b044b22: Status 404 returned error can't find the container with id 8eb45138d8e144e9c29a0a45444c9677be82737e847610d8031db75d0b044b22 Jan 22 00:32:03 crc kubenswrapper[4829]: I0122 00:32:03.221025 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f964b986c-5ln55" event={"ID":"8adb7c3d-24d0-462c-a2bb-ff7533df28d6","Type":"ContainerStarted","Data":"0cf97efb1848cdb89a22f6cc5cd8d3fa8a4cd3e607572ee0ab47c0f5bf6caa14"} Jan 22 00:32:03 crc kubenswrapper[4829]: I0122 00:32:03.221065 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f964b986c-5ln55" event={"ID":"8adb7c3d-24d0-462c-a2bb-ff7533df28d6","Type":"ContainerStarted","Data":"8eb45138d8e144e9c29a0a45444c9677be82737e847610d8031db75d0b044b22"} Jan 22 00:32:04 crc kubenswrapper[4829]: I0122 00:32:04.658908 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:32:04 crc kubenswrapper[4829]: I0122 00:32:04.659275 4829 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:32:06 crc kubenswrapper[4829]: I0122 00:32:06.605722 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/prometheus-default-0" Jan 22 00:32:06 crc kubenswrapper[4829]: I0122 00:32:06.666881 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/prometheus-default-0" Jan 22 00:32:07 crc kubenswrapper[4829]: I0122 00:32:07.297726 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/prometheus-default-0" Jan 22 00:32:10 crc kubenswrapper[4829]: I0122 00:32:10.272065 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-86889d79-n55rd" event={"ID":"975d0f01-de8e-402c-aa4b-f582673781e9","Type":"ContainerStarted","Data":"bc2cb18028982bb57108728f6ff3ba1cf0401646127f379886a8e35cfdbbef36"} Jan 22 00:32:10 crc kubenswrapper[4829]: I0122 00:32:10.274243 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" event={"ID":"e0451c51-c962-4cad-88e1-4e58c936f3c4","Type":"ContainerStarted","Data":"d7e13c6f4b97e35477559e10ff6ce1e777c244da4cb3a15bcb2d69f66677c8f6"} Jan 22 00:32:10 crc kubenswrapper[4829]: I0122 00:32:10.275988 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" event={"ID":"6bff65e7-d317-42b0-92fd-79aa3d4a6d7a","Type":"ContainerStarted","Data":"09d18002d1c49833f85a042c0beb744c6e8097a6aaf30ac6318db8db6fca2eb0"} Jan 22 00:32:10 crc kubenswrapper[4829]: I0122 00:32:10.277523 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f964b986c-5ln55" event={"ID":"8adb7c3d-24d0-462c-a2bb-ff7533df28d6","Type":"ContainerStarted","Data":"75c1013cd5408fc005bda2bf0c5f305e8dc68bb6e7f4d471f242b567e38c4654"} Jan 22 00:32:10 crc kubenswrapper[4829]: I0122 00:32:10.281272 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" event={"ID":"ed4e3e4a-9af6-470a-9f31-9bb2f31e166d","Type":"ContainerStarted","Data":"772f8eb252407a80587d33675fedc6207795a82d0d1cade771de61f67d0be8d6"} Jan 22 00:32:10 crc kubenswrapper[4829]: I0122 00:32:10.319525 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-coll-event-smartgateway-86889d79-n55rd" podStartSLOduration=2.493290917 podStartE2EDuration="10.319500295s" podCreationTimestamp="2026-01-22 00:32:00 +0000 UTC" firstStartedPulling="2026-01-22 00:32:01.675077908 +0000 UTC m=+1499.711319820" lastFinishedPulling="2026-01-22 00:32:09.501287286 +0000 UTC m=+1507.537529198" observedRunningTime="2026-01-22 00:32:10.297299687 +0000 UTC m=+1508.333541639" watchObservedRunningTime="2026-01-22 00:32:10.319500295 +0000 UTC m=+1508.355742247" Jan 22 00:32:10 crc kubenswrapper[4829]: I0122 00:32:10.324318 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" podStartSLOduration=5.647189039 podStartE2EDuration="21.324298513s" podCreationTimestamp="2026-01-22 00:31:49 
+0000 UTC" firstStartedPulling="2026-01-22 00:31:53.785929849 +0000 UTC m=+1491.822171761" lastFinishedPulling="2026-01-22 00:32:09.463039323 +0000 UTC m=+1507.499281235" observedRunningTime="2026-01-22 00:32:10.313600982 +0000 UTC m=+1508.349842914" watchObservedRunningTime="2026-01-22 00:32:10.324298513 +0000 UTC m=+1508.360540455" Jan 22 00:32:10 crc kubenswrapper[4829]: I0122 00:32:10.342994 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f964b986c-5ln55" podStartSLOduration=2.769929464 podStartE2EDuration="9.342975951s" podCreationTimestamp="2026-01-22 00:32:01 +0000 UTC" firstStartedPulling="2026-01-22 00:32:02.843179756 +0000 UTC m=+1500.879421668" lastFinishedPulling="2026-01-22 00:32:09.416226243 +0000 UTC m=+1507.452468155" observedRunningTime="2026-01-22 00:32:10.338695239 +0000 UTC m=+1508.374937181" watchObservedRunningTime="2026-01-22 00:32:10.342975951 +0000 UTC m=+1508.379217863" Jan 22 00:32:10 crc kubenswrapper[4829]: I0122 00:32:10.371316 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" podStartSLOduration=3.270251706 podStartE2EDuration="16.371291687s" podCreationTimestamp="2026-01-22 00:31:54 +0000 UTC" firstStartedPulling="2026-01-22 00:31:56.420796392 +0000 UTC m=+1494.457038304" lastFinishedPulling="2026-01-22 00:32:09.521836363 +0000 UTC m=+1507.558078285" observedRunningTime="2026-01-22 00:32:10.368594784 +0000 UTC m=+1508.404836706" watchObservedRunningTime="2026-01-22 00:32:10.371291687 +0000 UTC m=+1508.407533599" Jan 22 00:32:10 crc kubenswrapper[4829]: I0122 00:32:10.392778 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" podStartSLOduration=7.526141642 podStartE2EDuration="23.392758131s" podCreationTimestamp="2026-01-22 00:31:47 +0000 UTC" firstStartedPulling="2026-01-22 00:31:53.602330256 +0000 UTC m=+1491.638572158" lastFinishedPulling="2026-01-22 00:32:09.468946735 +0000 UTC m=+1507.505188647" observedRunningTime="2026-01-22 00:32:10.392651009 +0000 UTC m=+1508.428892931" watchObservedRunningTime="2026-01-22 00:32:10.392758131 +0000 UTC m=+1508.429000043" Jan 22 00:32:15 crc kubenswrapper[4829]: I0122 00:32:15.016293 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-5mfjb"] Jan 22 00:32:15 crc kubenswrapper[4829]: I0122 00:32:15.016926 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" podUID="3a466515-d1ce-4572-a159-a8be2d89a124" containerName="default-interconnect" containerID="cri-o://ee43842d6374876ab34f71d2e335d557db442e6738431128f9b5adfc90dbbca6" gracePeriod=30 Jan 22 00:32:16 crc kubenswrapper[4829]: I0122 00:32:16.318901 4829 generic.go:334] "Generic (PLEG): container finished" podID="ed4e3e4a-9af6-470a-9f31-9bb2f31e166d" containerID="97f8a5359497737913a39c9385863152ae1ab0230bfe38eb741f1bd51dce2403" exitCode=0 Jan 22 00:32:16 crc kubenswrapper[4829]: I0122 00:32:16.318940 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" event={"ID":"ed4e3e4a-9af6-470a-9f31-9bb2f31e166d","Type":"ContainerDied","Data":"97f8a5359497737913a39c9385863152ae1ab0230bfe38eb741f1bd51dce2403"} Jan 22 00:32:16 crc kubenswrapper[4829]: I0122 00:32:16.319954 
4829 scope.go:117] "RemoveContainer" containerID="97f8a5359497737913a39c9385863152ae1ab0230bfe38eb741f1bd51dce2403" Jan 22 00:32:16 crc kubenswrapper[4829]: I0122 00:32:16.322998 4829 generic.go:334] "Generic (PLEG): container finished" podID="975d0f01-de8e-402c-aa4b-f582673781e9" containerID="44c3bfafa9dc429848f3ab6f78a2ddb6e925d409e738847cbebae68f0803378c" exitCode=0 Jan 22 00:32:16 crc kubenswrapper[4829]: I0122 00:32:16.323058 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-86889d79-n55rd" event={"ID":"975d0f01-de8e-402c-aa4b-f582673781e9","Type":"ContainerDied","Data":"44c3bfafa9dc429848f3ab6f78a2ddb6e925d409e738847cbebae68f0803378c"} Jan 22 00:32:16 crc kubenswrapper[4829]: I0122 00:32:16.324233 4829 scope.go:117] "RemoveContainer" containerID="44c3bfafa9dc429848f3ab6f78a2ddb6e925d409e738847cbebae68f0803378c" Jan 22 00:32:16 crc kubenswrapper[4829]: I0122 00:32:16.344185 4829 generic.go:334] "Generic (PLEG): container finished" podID="e0451c51-c962-4cad-88e1-4e58c936f3c4" containerID="6c04533c09868887c2cbd4ca21aebd564472f8bea13b1abff2b4d9b43bcfbad0" exitCode=0 Jan 22 00:32:16 crc kubenswrapper[4829]: I0122 00:32:16.344323 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" event={"ID":"e0451c51-c962-4cad-88e1-4e58c936f3c4","Type":"ContainerDied","Data":"6c04533c09868887c2cbd4ca21aebd564472f8bea13b1abff2b4d9b43bcfbad0"} Jan 22 00:32:16 crc kubenswrapper[4829]: I0122 00:32:16.345317 4829 scope.go:117] "RemoveContainer" containerID="6c04533c09868887c2cbd4ca21aebd564472f8bea13b1abff2b4d9b43bcfbad0" Jan 22 00:32:16 crc kubenswrapper[4829]: I0122 00:32:16.370371 4829 generic.go:334] "Generic (PLEG): container finished" podID="6bff65e7-d317-42b0-92fd-79aa3d4a6d7a" containerID="a28bb73e65d606a4888fe1396d6d61a1125aed4a6d8f2247d00e9db71c56acb9" exitCode=0 Jan 22 00:32:16 crc kubenswrapper[4829]: I0122 00:32:16.370484 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" event={"ID":"6bff65e7-d317-42b0-92fd-79aa3d4a6d7a","Type":"ContainerDied","Data":"a28bb73e65d606a4888fe1396d6d61a1125aed4a6d8f2247d00e9db71c56acb9"} Jan 22 00:32:16 crc kubenswrapper[4829]: I0122 00:32:16.372873 4829 scope.go:117] "RemoveContainer" containerID="a28bb73e65d606a4888fe1396d6d61a1125aed4a6d8f2247d00e9db71c56acb9" Jan 22 00:32:16 crc kubenswrapper[4829]: I0122 00:32:16.407799 4829 generic.go:334] "Generic (PLEG): container finished" podID="8adb7c3d-24d0-462c-a2bb-ff7533df28d6" containerID="0cf97efb1848cdb89a22f6cc5cd8d3fa8a4cd3e607572ee0ab47c0f5bf6caa14" exitCode=0 Jan 22 00:32:16 crc kubenswrapper[4829]: I0122 00:32:16.407870 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f964b986c-5ln55" event={"ID":"8adb7c3d-24d0-462c-a2bb-ff7533df28d6","Type":"ContainerDied","Data":"0cf97efb1848cdb89a22f6cc5cd8d3fa8a4cd3e607572ee0ab47c0f5bf6caa14"} Jan 22 00:32:16 crc kubenswrapper[4829]: I0122 00:32:16.408257 4829 scope.go:117] "RemoveContainer" containerID="0cf97efb1848cdb89a22f6cc5cd8d3fa8a4cd3e607572ee0ab47c0f5bf6caa14" Jan 22 00:32:16 crc kubenswrapper[4829]: I0122 00:32:16.432015 4829 generic.go:334] "Generic (PLEG): container finished" podID="3a466515-d1ce-4572-a159-a8be2d89a124" containerID="ee43842d6374876ab34f71d2e335d557db442e6738431128f9b5adfc90dbbca6" exitCode=0 Jan 22 00:32:16 crc kubenswrapper[4829]: I0122 
00:32:16.432099 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" event={"ID":"3a466515-d1ce-4572-a159-a8be2d89a124","Type":"ContainerDied","Data":"ee43842d6374876ab34f71d2e335d557db442e6738431128f9b5adfc90dbbca6"} Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.160091 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.201586 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-ccnh9"] Jan 22 00:32:17 crc kubenswrapper[4829]: E0122 00:32:17.202179 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a466515-d1ce-4572-a159-a8be2d89a124" containerName="default-interconnect" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.202201 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a466515-d1ce-4572-a159-a8be2d89a124" containerName="default-interconnect" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.202386 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a466515-d1ce-4572-a159-a8be2d89a124" containerName="default-interconnect" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.203157 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-ccnh9" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.210562 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-ccnh9"] Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.227087 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-default-interconnect-openstack-credentials\") pod \"3a466515-d1ce-4572-a159-a8be2d89a124\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.227473 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/3a466515-d1ce-4572-a159-a8be2d89a124-sasl-config\") pod \"3a466515-d1ce-4572-a159-a8be2d89a124\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.227521 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-default-interconnect-inter-router-ca\") pod \"3a466515-d1ce-4572-a159-a8be2d89a124\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.227562 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-sasl-users\") pod \"3a466515-d1ce-4572-a159-a8be2d89a124\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.227613 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-default-interconnect-openstack-ca\") pod \"3a466515-d1ce-4572-a159-a8be2d89a124\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " Jan 22 
00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.227680 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2t4xf\" (UniqueName: \"kubernetes.io/projected/3a466515-d1ce-4572-a159-a8be2d89a124-kube-api-access-2t4xf\") pod \"3a466515-d1ce-4572-a159-a8be2d89a124\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.227741 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-default-interconnect-inter-router-credentials\") pod \"3a466515-d1ce-4572-a159-a8be2d89a124\" (UID: \"3a466515-d1ce-4572-a159-a8be2d89a124\") " Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.227995 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfd8q\" (UniqueName: \"kubernetes.io/projected/3ac09cb4-04ec-4360-90c2-5e3d7cb81b87-kube-api-access-hfd8q\") pod \"default-interconnect-68864d46cb-ccnh9\" (UID: \"3ac09cb4-04ec-4360-90c2-5e3d7cb81b87\") " pod="service-telemetry/default-interconnect-68864d46cb-ccnh9" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.228037 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/3ac09cb4-04ec-4360-90c2-5e3d7cb81b87-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-ccnh9\" (UID: \"3ac09cb4-04ec-4360-90c2-5e3d7cb81b87\") " pod="service-telemetry/default-interconnect-68864d46cb-ccnh9" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.228069 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/3ac09cb4-04ec-4360-90c2-5e3d7cb81b87-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-ccnh9\" (UID: \"3ac09cb4-04ec-4360-90c2-5e3d7cb81b87\") " pod="service-telemetry/default-interconnect-68864d46cb-ccnh9" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.228111 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/3ac09cb4-04ec-4360-90c2-5e3d7cb81b87-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-ccnh9\" (UID: \"3ac09cb4-04ec-4360-90c2-5e3d7cb81b87\") " pod="service-telemetry/default-interconnect-68864d46cb-ccnh9" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.228137 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/3ac09cb4-04ec-4360-90c2-5e3d7cb81b87-sasl-users\") pod \"default-interconnect-68864d46cb-ccnh9\" (UID: \"3ac09cb4-04ec-4360-90c2-5e3d7cb81b87\") " pod="service-telemetry/default-interconnect-68864d46cb-ccnh9" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.228171 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/3ac09cb4-04ec-4360-90c2-5e3d7cb81b87-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-ccnh9\" (UID: \"3ac09cb4-04ec-4360-90c2-5e3d7cb81b87\") " 
pod="service-telemetry/default-interconnect-68864d46cb-ccnh9" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.228200 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/3ac09cb4-04ec-4360-90c2-5e3d7cb81b87-sasl-config\") pod \"default-interconnect-68864d46cb-ccnh9\" (UID: \"3ac09cb4-04ec-4360-90c2-5e3d7cb81b87\") " pod="service-telemetry/default-interconnect-68864d46cb-ccnh9" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.230572 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a466515-d1ce-4572-a159-a8be2d89a124-sasl-config" (OuterVolumeSpecName: "sasl-config") pod "3a466515-d1ce-4572-a159-a8be2d89a124" (UID: "3a466515-d1ce-4572-a159-a8be2d89a124"). InnerVolumeSpecName "sasl-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.238938 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-default-interconnect-inter-router-credentials" (OuterVolumeSpecName: "default-interconnect-inter-router-credentials") pod "3a466515-d1ce-4572-a159-a8be2d89a124" (UID: "3a466515-d1ce-4572-a159-a8be2d89a124"). InnerVolumeSpecName "default-interconnect-inter-router-credentials". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.239380 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-default-interconnect-openstack-credentials" (OuterVolumeSpecName: "default-interconnect-openstack-credentials") pod "3a466515-d1ce-4572-a159-a8be2d89a124" (UID: "3a466515-d1ce-4572-a159-a8be2d89a124"). InnerVolumeSpecName "default-interconnect-openstack-credentials". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.239927 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-sasl-users" (OuterVolumeSpecName: "sasl-users") pod "3a466515-d1ce-4572-a159-a8be2d89a124" (UID: "3a466515-d1ce-4572-a159-a8be2d89a124"). InnerVolumeSpecName "sasl-users". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.240258 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-default-interconnect-openstack-ca" (OuterVolumeSpecName: "default-interconnect-openstack-ca") pod "3a466515-d1ce-4572-a159-a8be2d89a124" (UID: "3a466515-d1ce-4572-a159-a8be2d89a124"). InnerVolumeSpecName "default-interconnect-openstack-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.240512 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a466515-d1ce-4572-a159-a8be2d89a124-kube-api-access-2t4xf" (OuterVolumeSpecName: "kube-api-access-2t4xf") pod "3a466515-d1ce-4572-a159-a8be2d89a124" (UID: "3a466515-d1ce-4572-a159-a8be2d89a124"). InnerVolumeSpecName "kube-api-access-2t4xf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.241502 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-default-interconnect-inter-router-ca" (OuterVolumeSpecName: "default-interconnect-inter-router-ca") pod "3a466515-d1ce-4572-a159-a8be2d89a124" (UID: "3a466515-d1ce-4572-a159-a8be2d89a124"). InnerVolumeSpecName "default-interconnect-inter-router-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.328772 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfd8q\" (UniqueName: \"kubernetes.io/projected/3ac09cb4-04ec-4360-90c2-5e3d7cb81b87-kube-api-access-hfd8q\") pod \"default-interconnect-68864d46cb-ccnh9\" (UID: \"3ac09cb4-04ec-4360-90c2-5e3d7cb81b87\") " pod="service-telemetry/default-interconnect-68864d46cb-ccnh9" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.329810 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/3ac09cb4-04ec-4360-90c2-5e3d7cb81b87-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-ccnh9\" (UID: \"3ac09cb4-04ec-4360-90c2-5e3d7cb81b87\") " pod="service-telemetry/default-interconnect-68864d46cb-ccnh9" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.329928 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/3ac09cb4-04ec-4360-90c2-5e3d7cb81b87-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-ccnh9\" (UID: \"3ac09cb4-04ec-4360-90c2-5e3d7cb81b87\") " pod="service-telemetry/default-interconnect-68864d46cb-ccnh9" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.330045 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/3ac09cb4-04ec-4360-90c2-5e3d7cb81b87-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-ccnh9\" (UID: \"3ac09cb4-04ec-4360-90c2-5e3d7cb81b87\") " pod="service-telemetry/default-interconnect-68864d46cb-ccnh9" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.330134 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/3ac09cb4-04ec-4360-90c2-5e3d7cb81b87-sasl-users\") pod \"default-interconnect-68864d46cb-ccnh9\" (UID: \"3ac09cb4-04ec-4360-90c2-5e3d7cb81b87\") " pod="service-telemetry/default-interconnect-68864d46cb-ccnh9" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.330291 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/3ac09cb4-04ec-4360-90c2-5e3d7cb81b87-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-ccnh9\" (UID: \"3ac09cb4-04ec-4360-90c2-5e3d7cb81b87\") " pod="service-telemetry/default-interconnect-68864d46cb-ccnh9" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.330411 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/3ac09cb4-04ec-4360-90c2-5e3d7cb81b87-sasl-config\") pod \"default-interconnect-68864d46cb-ccnh9\" (UID: 
\"3ac09cb4-04ec-4360-90c2-5e3d7cb81b87\") " pod="service-telemetry/default-interconnect-68864d46cb-ccnh9" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.330569 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2t4xf\" (UniqueName: \"kubernetes.io/projected/3a466515-d1ce-4572-a159-a8be2d89a124-kube-api-access-2t4xf\") on node \"crc\" DevicePath \"\"" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.330674 4829 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-default-interconnect-inter-router-credentials\") on node \"crc\" DevicePath \"\"" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.330760 4829 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-default-interconnect-openstack-credentials\") on node \"crc\" DevicePath \"\"" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.330852 4829 reconciler_common.go:293] "Volume detached for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/3a466515-d1ce-4572-a159-a8be2d89a124-sasl-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.330948 4829 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-default-interconnect-inter-router-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.331030 4829 reconciler_common.go:293] "Volume detached for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-sasl-users\") on node \"crc\" DevicePath \"\"" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.331116 4829 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/3a466515-d1ce-4572-a159-a8be2d89a124-default-interconnect-openstack-ca\") on node \"crc\" DevicePath \"\"" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.331552 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/3ac09cb4-04ec-4360-90c2-5e3d7cb81b87-sasl-config\") pod \"default-interconnect-68864d46cb-ccnh9\" (UID: \"3ac09cb4-04ec-4360-90c2-5e3d7cb81b87\") " pod="service-telemetry/default-interconnect-68864d46cb-ccnh9" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.334020 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/3ac09cb4-04ec-4360-90c2-5e3d7cb81b87-sasl-users\") pod \"default-interconnect-68864d46cb-ccnh9\" (UID: \"3ac09cb4-04ec-4360-90c2-5e3d7cb81b87\") " pod="service-telemetry/default-interconnect-68864d46cb-ccnh9" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.334309 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/3ac09cb4-04ec-4360-90c2-5e3d7cb81b87-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-ccnh9\" (UID: \"3ac09cb4-04ec-4360-90c2-5e3d7cb81b87\") " pod="service-telemetry/default-interconnect-68864d46cb-ccnh9" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.334828 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/3ac09cb4-04ec-4360-90c2-5e3d7cb81b87-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-ccnh9\" (UID: \"3ac09cb4-04ec-4360-90c2-5e3d7cb81b87\") " pod="service-telemetry/default-interconnect-68864d46cb-ccnh9" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.336401 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/3ac09cb4-04ec-4360-90c2-5e3d7cb81b87-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-ccnh9\" (UID: \"3ac09cb4-04ec-4360-90c2-5e3d7cb81b87\") " pod="service-telemetry/default-interconnect-68864d46cb-ccnh9" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.340340 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/3ac09cb4-04ec-4360-90c2-5e3d7cb81b87-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-ccnh9\" (UID: \"3ac09cb4-04ec-4360-90c2-5e3d7cb81b87\") " pod="service-telemetry/default-interconnect-68864d46cb-ccnh9" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.346766 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfd8q\" (UniqueName: \"kubernetes.io/projected/3ac09cb4-04ec-4360-90c2-5e3d7cb81b87-kube-api-access-hfd8q\") pod \"default-interconnect-68864d46cb-ccnh9\" (UID: \"3ac09cb4-04ec-4360-90c2-5e3d7cb81b87\") " pod="service-telemetry/default-interconnect-68864d46cb-ccnh9" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.439942 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" event={"ID":"e0451c51-c962-4cad-88e1-4e58c936f3c4","Type":"ContainerStarted","Data":"89e5ac94644e7104d0691e872827eb502b5d008c6028b5e4855e05715119f846"} Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.442817 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" event={"ID":"6bff65e7-d317-42b0-92fd-79aa3d4a6d7a","Type":"ContainerStarted","Data":"5dca49ec52b6143d56fc1696cefc260476c07112f9b0d8994b1d01a95c59eaa9"} Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.444929 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f964b986c-5ln55" event={"ID":"8adb7c3d-24d0-462c-a2bb-ff7533df28d6","Type":"ContainerStarted","Data":"b1ac8931a4c2075e2baab7a51399da69bef1bbddc82c6dd6b5fb9c7bb2f8dfd4"} Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.446478 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" event={"ID":"3a466515-d1ce-4572-a159-a8be2d89a124","Type":"ContainerDied","Data":"ed61594a11b1254006d0544646291aa8efd04a388cbc00ad96b6f39d7436e5d0"} Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.446518 4829 scope.go:117] "RemoveContainer" containerID="ee43842d6374876ab34f71d2e335d557db442e6738431128f9b5adfc90dbbca6" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.446489 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-5mfjb" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.449570 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" event={"ID":"ed4e3e4a-9af6-470a-9f31-9bb2f31e166d","Type":"ContainerStarted","Data":"5211fbc8160cea8003c0c803cd35af2012daf7e7498d6dbadc432548859c6dd0"} Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.452103 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-86889d79-n55rd" event={"ID":"975d0f01-de8e-402c-aa4b-f582673781e9","Type":"ContainerStarted","Data":"354ea289ddf4112700a72afd0e96212108b8cfac40da137cc717c0aeaa51a670"} Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.522875 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-ccnh9" Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.623523 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-5mfjb"] Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.629252 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-5mfjb"] Jan 22 00:32:17 crc kubenswrapper[4829]: I0122 00:32:17.773304 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-ccnh9"] Jan 22 00:32:17 crc kubenswrapper[4829]: W0122 00:32:17.780424 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3ac09cb4_04ec_4360_90c2_5e3d7cb81b87.slice/crio-5816c3c57e00ad6f12c4b541546f951c93a3701b780dd15eb1eb816f03f58512 WatchSource:0}: Error finding container 5816c3c57e00ad6f12c4b541546f951c93a3701b780dd15eb1eb816f03f58512: Status 404 returned error can't find the container with id 5816c3c57e00ad6f12c4b541546f951c93a3701b780dd15eb1eb816f03f58512 Jan 22 00:32:18 crc kubenswrapper[4829]: I0122 00:32:18.473198 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-ccnh9" event={"ID":"3ac09cb4-04ec-4360-90c2-5e3d7cb81b87","Type":"ContainerStarted","Data":"b006f532fb025a190562fd2f48ca9f990508183f82fb52bcf30f7c7efc24aae5"} Jan 22 00:32:18 crc kubenswrapper[4829]: I0122 00:32:18.473892 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-ccnh9" event={"ID":"3ac09cb4-04ec-4360-90c2-5e3d7cb81b87","Type":"ContainerStarted","Data":"5816c3c57e00ad6f12c4b541546f951c93a3701b780dd15eb1eb816f03f58512"} Jan 22 00:32:18 crc kubenswrapper[4829]: I0122 00:32:18.481261 4829 generic.go:334] "Generic (PLEG): container finished" podID="e0451c51-c962-4cad-88e1-4e58c936f3c4" containerID="89e5ac94644e7104d0691e872827eb502b5d008c6028b5e4855e05715119f846" exitCode=0 Jan 22 00:32:18 crc kubenswrapper[4829]: I0122 00:32:18.481359 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" event={"ID":"e0451c51-c962-4cad-88e1-4e58c936f3c4","Type":"ContainerDied","Data":"89e5ac94644e7104d0691e872827eb502b5d008c6028b5e4855e05715119f846"} Jan 22 00:32:18 crc kubenswrapper[4829]: I0122 00:32:18.481429 4829 scope.go:117] "RemoveContainer" containerID="6c04533c09868887c2cbd4ca21aebd564472f8bea13b1abff2b4d9b43bcfbad0" Jan 22 00:32:18 crc 
kubenswrapper[4829]: I0122 00:32:18.482062 4829 scope.go:117] "RemoveContainer" containerID="89e5ac94644e7104d0691e872827eb502b5d008c6028b5e4855e05715119f846" Jan 22 00:32:18 crc kubenswrapper[4829]: E0122 00:32:18.482284 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf_service-telemetry(e0451c51-c962-4cad-88e1-4e58c936f3c4)\"" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" podUID="e0451c51-c962-4cad-88e1-4e58c936f3c4" Jan 22 00:32:18 crc kubenswrapper[4829]: I0122 00:32:18.488943 4829 generic.go:334] "Generic (PLEG): container finished" podID="6bff65e7-d317-42b0-92fd-79aa3d4a6d7a" containerID="5dca49ec52b6143d56fc1696cefc260476c07112f9b0d8994b1d01a95c59eaa9" exitCode=0 Jan 22 00:32:18 crc kubenswrapper[4829]: I0122 00:32:18.489083 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" event={"ID":"6bff65e7-d317-42b0-92fd-79aa3d4a6d7a","Type":"ContainerDied","Data":"5dca49ec52b6143d56fc1696cefc260476c07112f9b0d8994b1d01a95c59eaa9"} Jan 22 00:32:18 crc kubenswrapper[4829]: I0122 00:32:18.489939 4829 scope.go:117] "RemoveContainer" containerID="5dca49ec52b6143d56fc1696cefc260476c07112f9b0d8994b1d01a95c59eaa9" Jan 22 00:32:18 crc kubenswrapper[4829]: E0122 00:32:18.490253 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45_service-telemetry(6bff65e7-d317-42b0-92fd-79aa3d4a6d7a)\"" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" podUID="6bff65e7-d317-42b0-92fd-79aa3d4a6d7a" Jan 22 00:32:18 crc kubenswrapper[4829]: I0122 00:32:18.494804 4829 generic.go:334] "Generic (PLEG): container finished" podID="8adb7c3d-24d0-462c-a2bb-ff7533df28d6" containerID="b1ac8931a4c2075e2baab7a51399da69bef1bbddc82c6dd6b5fb9c7bb2f8dfd4" exitCode=0 Jan 22 00:32:18 crc kubenswrapper[4829]: I0122 00:32:18.494869 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f964b986c-5ln55" event={"ID":"8adb7c3d-24d0-462c-a2bb-ff7533df28d6","Type":"ContainerDied","Data":"b1ac8931a4c2075e2baab7a51399da69bef1bbddc82c6dd6b5fb9c7bb2f8dfd4"} Jan 22 00:32:18 crc kubenswrapper[4829]: I0122 00:32:18.495386 4829 scope.go:117] "RemoveContainer" containerID="b1ac8931a4c2075e2baab7a51399da69bef1bbddc82c6dd6b5fb9c7bb2f8dfd4" Jan 22 00:32:18 crc kubenswrapper[4829]: E0122 00:32:18.495709 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-ceil-event-smartgateway-f964b986c-5ln55_service-telemetry(8adb7c3d-24d0-462c-a2bb-ff7533df28d6)\"" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f964b986c-5ln55" podUID="8adb7c3d-24d0-462c-a2bb-ff7533df28d6" Jan 22 00:32:18 crc kubenswrapper[4829]: I0122 00:32:18.499741 4829 generic.go:334] "Generic (PLEG): container finished" podID="ed4e3e4a-9af6-470a-9f31-9bb2f31e166d" containerID="5211fbc8160cea8003c0c803cd35af2012daf7e7498d6dbadc432548859c6dd0" exitCode=0 Jan 22 00:32:18 crc kubenswrapper[4829]: I0122 00:32:18.499802 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" event={"ID":"ed4e3e4a-9af6-470a-9f31-9bb2f31e166d","Type":"ContainerDied","Data":"5211fbc8160cea8003c0c803cd35af2012daf7e7498d6dbadc432548859c6dd0"} Jan 22 00:32:18 crc kubenswrapper[4829]: I0122 00:32:18.500186 4829 scope.go:117] "RemoveContainer" containerID="5211fbc8160cea8003c0c803cd35af2012daf7e7498d6dbadc432548859c6dd0" Jan 22 00:32:18 crc kubenswrapper[4829]: E0122 00:32:18.500406 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6_service-telemetry(ed4e3e4a-9af6-470a-9f31-9bb2f31e166d)\"" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" podUID="ed4e3e4a-9af6-470a-9f31-9bb2f31e166d" Jan 22 00:32:18 crc kubenswrapper[4829]: I0122 00:32:18.511693 4829 generic.go:334] "Generic (PLEG): container finished" podID="975d0f01-de8e-402c-aa4b-f582673781e9" containerID="354ea289ddf4112700a72afd0e96212108b8cfac40da137cc717c0aeaa51a670" exitCode=0 Jan 22 00:32:18 crc kubenswrapper[4829]: I0122 00:32:18.513138 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-86889d79-n55rd" event={"ID":"975d0f01-de8e-402c-aa4b-f582673781e9","Type":"ContainerDied","Data":"354ea289ddf4112700a72afd0e96212108b8cfac40da137cc717c0aeaa51a670"} Jan 22 00:32:18 crc kubenswrapper[4829]: I0122 00:32:18.513846 4829 scope.go:117] "RemoveContainer" containerID="354ea289ddf4112700a72afd0e96212108b8cfac40da137cc717c0aeaa51a670" Jan 22 00:32:18 crc kubenswrapper[4829]: E0122 00:32:18.514108 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-coll-event-smartgateway-86889d79-n55rd_service-telemetry(975d0f01-de8e-402c-aa4b-f582673781e9)\"" pod="service-telemetry/default-cloud1-coll-event-smartgateway-86889d79-n55rd" podUID="975d0f01-de8e-402c-aa4b-f582673781e9" Jan 22 00:32:18 crc kubenswrapper[4829]: I0122 00:32:18.537938 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-interconnect-68864d46cb-ccnh9" podStartSLOduration=3.537912656 podStartE2EDuration="3.537912656s" podCreationTimestamp="2026-01-22 00:32:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 00:32:18.511826128 +0000 UTC m=+1516.548068040" watchObservedRunningTime="2026-01-22 00:32:18.537912656 +0000 UTC m=+1516.574154568" Jan 22 00:32:18 crc kubenswrapper[4829]: I0122 00:32:18.564368 4829 scope.go:117] "RemoveContainer" containerID="a28bb73e65d606a4888fe1396d6d61a1125aed4a6d8f2247d00e9db71c56acb9" Jan 22 00:32:18 crc kubenswrapper[4829]: I0122 00:32:18.573712 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a466515-d1ce-4572-a159-a8be2d89a124" path="/var/lib/kubelet/pods/3a466515-d1ce-4572-a159-a8be2d89a124/volumes" Jan 22 00:32:18 crc kubenswrapper[4829]: I0122 00:32:18.617005 4829 scope.go:117] "RemoveContainer" containerID="0cf97efb1848cdb89a22f6cc5cd8d3fa8a4cd3e607572ee0ab47c0f5bf6caa14" Jan 22 00:32:18 crc kubenswrapper[4829]: I0122 00:32:18.670353 4829 scope.go:117] "RemoveContainer" containerID="97f8a5359497737913a39c9385863152ae1ab0230bfe38eb741f1bd51dce2403" Jan 22 00:32:18 crc 
kubenswrapper[4829]: I0122 00:32:18.717880 4829 scope.go:117] "RemoveContainer" containerID="44c3bfafa9dc429848f3ab6f78a2ddb6e925d409e738847cbebae68f0803378c" Jan 22 00:32:20 crc kubenswrapper[4829]: I0122 00:32:20.267799 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/qdr-test"] Jan 22 00:32:20 crc kubenswrapper[4829]: I0122 00:32:20.268832 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/qdr-test" Jan 22 00:32:20 crc kubenswrapper[4829]: I0122 00:32:20.271850 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"qdr-test-config" Jan 22 00:32:20 crc kubenswrapper[4829]: I0122 00:32:20.271992 4829 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-selfsigned" Jan 22 00:32:20 crc kubenswrapper[4829]: I0122 00:32:20.280966 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/qdr-test"] Jan 22 00:32:20 crc kubenswrapper[4829]: I0122 00:32:20.305435 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-selfsigned-cert\" (UniqueName: \"kubernetes.io/secret/a7e55abd-fc7a-4fbb-a99d-695e2287e933-default-interconnect-selfsigned-cert\") pod \"qdr-test\" (UID: \"a7e55abd-fc7a-4fbb-a99d-695e2287e933\") " pod="service-telemetry/qdr-test" Jan 22 00:32:20 crc kubenswrapper[4829]: I0122 00:32:20.305505 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"qdr-test-config\" (UniqueName: \"kubernetes.io/configmap/a7e55abd-fc7a-4fbb-a99d-695e2287e933-qdr-test-config\") pod \"qdr-test\" (UID: \"a7e55abd-fc7a-4fbb-a99d-695e2287e933\") " pod="service-telemetry/qdr-test" Jan 22 00:32:20 crc kubenswrapper[4829]: I0122 00:32:20.305686 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5rlq\" (UniqueName: \"kubernetes.io/projected/a7e55abd-fc7a-4fbb-a99d-695e2287e933-kube-api-access-g5rlq\") pod \"qdr-test\" (UID: \"a7e55abd-fc7a-4fbb-a99d-695e2287e933\") " pod="service-telemetry/qdr-test" Jan 22 00:32:20 crc kubenswrapper[4829]: I0122 00:32:20.407785 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-selfsigned-cert\" (UniqueName: \"kubernetes.io/secret/a7e55abd-fc7a-4fbb-a99d-695e2287e933-default-interconnect-selfsigned-cert\") pod \"qdr-test\" (UID: \"a7e55abd-fc7a-4fbb-a99d-695e2287e933\") " pod="service-telemetry/qdr-test" Jan 22 00:32:20 crc kubenswrapper[4829]: I0122 00:32:20.407833 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"qdr-test-config\" (UniqueName: \"kubernetes.io/configmap/a7e55abd-fc7a-4fbb-a99d-695e2287e933-qdr-test-config\") pod \"qdr-test\" (UID: \"a7e55abd-fc7a-4fbb-a99d-695e2287e933\") " pod="service-telemetry/qdr-test" Jan 22 00:32:20 crc kubenswrapper[4829]: I0122 00:32:20.407861 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5rlq\" (UniqueName: \"kubernetes.io/projected/a7e55abd-fc7a-4fbb-a99d-695e2287e933-kube-api-access-g5rlq\") pod \"qdr-test\" (UID: \"a7e55abd-fc7a-4fbb-a99d-695e2287e933\") " pod="service-telemetry/qdr-test" Jan 22 00:32:20 crc kubenswrapper[4829]: I0122 00:32:20.408793 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"qdr-test-config\" (UniqueName: \"kubernetes.io/configmap/a7e55abd-fc7a-4fbb-a99d-695e2287e933-qdr-test-config\") pod 
\"qdr-test\" (UID: \"a7e55abd-fc7a-4fbb-a99d-695e2287e933\") " pod="service-telemetry/qdr-test" Jan 22 00:32:20 crc kubenswrapper[4829]: I0122 00:32:20.418404 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-selfsigned-cert\" (UniqueName: \"kubernetes.io/secret/a7e55abd-fc7a-4fbb-a99d-695e2287e933-default-interconnect-selfsigned-cert\") pod \"qdr-test\" (UID: \"a7e55abd-fc7a-4fbb-a99d-695e2287e933\") " pod="service-telemetry/qdr-test" Jan 22 00:32:20 crc kubenswrapper[4829]: I0122 00:32:20.434182 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5rlq\" (UniqueName: \"kubernetes.io/projected/a7e55abd-fc7a-4fbb-a99d-695e2287e933-kube-api-access-g5rlq\") pod \"qdr-test\" (UID: \"a7e55abd-fc7a-4fbb-a99d-695e2287e933\") " pod="service-telemetry/qdr-test" Jan 22 00:32:20 crc kubenswrapper[4829]: I0122 00:32:20.627258 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/qdr-test" Jan 22 00:32:20 crc kubenswrapper[4829]: I0122 00:32:20.873320 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/qdr-test"] Jan 22 00:32:20 crc kubenswrapper[4829]: W0122 00:32:20.895642 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda7e55abd_fc7a_4fbb_a99d_695e2287e933.slice/crio-d2753d713dcc04afaa680954cd2741e5c36d918cda3267f652f24103b6762d8a WatchSource:0}: Error finding container d2753d713dcc04afaa680954cd2741e5c36d918cda3267f652f24103b6762d8a: Status 404 returned error can't find the container with id d2753d713dcc04afaa680954cd2741e5c36d918cda3267f652f24103b6762d8a Jan 22 00:32:21 crc kubenswrapper[4829]: I0122 00:32:21.540985 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/qdr-test" event={"ID":"a7e55abd-fc7a-4fbb-a99d-695e2287e933","Type":"ContainerStarted","Data":"d2753d713dcc04afaa680954cd2741e5c36d918cda3267f652f24103b6762d8a"} Jan 22 00:32:30 crc kubenswrapper[4829]: I0122 00:32:30.553339 4829 scope.go:117] "RemoveContainer" containerID="5211fbc8160cea8003c0c803cd35af2012daf7e7498d6dbadc432548859c6dd0" Jan 22 00:32:30 crc kubenswrapper[4829]: I0122 00:32:30.641785 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/qdr-test" event={"ID":"a7e55abd-fc7a-4fbb-a99d-695e2287e933","Type":"ContainerStarted","Data":"7e3e45c9ace5e54bc8db317229e3c66dcc2bb45064682f110cb2d29435c07aec"} Jan 22 00:32:30 crc kubenswrapper[4829]: I0122 00:32:30.670689 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/qdr-test" podStartSLOduration=1.954537387 podStartE2EDuration="10.670661246s" podCreationTimestamp="2026-01-22 00:32:20 +0000 UTC" firstStartedPulling="2026-01-22 00:32:20.908605891 +0000 UTC m=+1518.944847813" lastFinishedPulling="2026-01-22 00:32:29.62472976 +0000 UTC m=+1527.660971672" observedRunningTime="2026-01-22 00:32:30.662884556 +0000 UTC m=+1528.699126498" watchObservedRunningTime="2026-01-22 00:32:30.670661246 +0000 UTC m=+1528.706903198" Jan 22 00:32:30 crc kubenswrapper[4829]: I0122 00:32:30.898307 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/stf-smoketest-smoke1-nkvjg"] Jan 22 00:32:30 crc kubenswrapper[4829]: I0122 00:32:30.903196 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-nkvjg" Jan 22 00:32:30 crc kubenswrapper[4829]: I0122 00:32:30.904861 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-sensubility-config" Jan 22 00:32:30 crc kubenswrapper[4829]: I0122 00:32:30.905995 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-publisher" Jan 22 00:32:30 crc kubenswrapper[4829]: I0122 00:32:30.906154 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-config" Jan 22 00:32:30 crc kubenswrapper[4829]: I0122 00:32:30.906272 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-entrypoint-script" Jan 22 00:32:30 crc kubenswrapper[4829]: I0122 00:32:30.906455 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-healthcheck-log" Jan 22 00:32:30 crc kubenswrapper[4829]: I0122 00:32:30.906586 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-entrypoint-script" Jan 22 00:32:30 crc kubenswrapper[4829]: I0122 00:32:30.913037 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-nkvjg"] Jan 22 00:32:30 crc kubenswrapper[4829]: I0122 00:32:30.978194 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dgj9\" (UniqueName: \"kubernetes.io/projected/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-kube-api-access-6dgj9\") pod \"stf-smoketest-smoke1-nkvjg\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " pod="service-telemetry/stf-smoketest-smoke1-nkvjg" Jan 22 00:32:30 crc kubenswrapper[4829]: I0122 00:32:30.978296 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-nkvjg\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " pod="service-telemetry/stf-smoketest-smoke1-nkvjg" Jan 22 00:32:30 crc kubenswrapper[4829]: I0122 00:32:30.978336 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-collectd-config\") pod \"stf-smoketest-smoke1-nkvjg\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " pod="service-telemetry/stf-smoketest-smoke1-nkvjg" Jan 22 00:32:30 crc kubenswrapper[4829]: I0122 00:32:30.978364 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-sensubility-config\") pod \"stf-smoketest-smoke1-nkvjg\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " pod="service-telemetry/stf-smoketest-smoke1-nkvjg" Jan 22 00:32:30 crc kubenswrapper[4829]: I0122 00:32:30.978408 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-nkvjg\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " pod="service-telemetry/stf-smoketest-smoke1-nkvjg" Jan 22 00:32:30 crc 
kubenswrapper[4829]: I0122 00:32:30.978460 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-ceilometer-publisher\") pod \"stf-smoketest-smoke1-nkvjg\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " pod="service-telemetry/stf-smoketest-smoke1-nkvjg" Jan 22 00:32:30 crc kubenswrapper[4829]: I0122 00:32:30.978491 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-healthcheck-log\") pod \"stf-smoketest-smoke1-nkvjg\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " pod="service-telemetry/stf-smoketest-smoke1-nkvjg" Jan 22 00:32:31 crc kubenswrapper[4829]: I0122 00:32:31.079325 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-nkvjg\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " pod="service-telemetry/stf-smoketest-smoke1-nkvjg" Jan 22 00:32:31 crc kubenswrapper[4829]: I0122 00:32:31.079476 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-ceilometer-publisher\") pod \"stf-smoketest-smoke1-nkvjg\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " pod="service-telemetry/stf-smoketest-smoke1-nkvjg" Jan 22 00:32:31 crc kubenswrapper[4829]: I0122 00:32:31.079598 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-healthcheck-log\") pod \"stf-smoketest-smoke1-nkvjg\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " pod="service-telemetry/stf-smoketest-smoke1-nkvjg" Jan 22 00:32:31 crc kubenswrapper[4829]: I0122 00:32:31.079750 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dgj9\" (UniqueName: \"kubernetes.io/projected/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-kube-api-access-6dgj9\") pod \"stf-smoketest-smoke1-nkvjg\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " pod="service-telemetry/stf-smoketest-smoke1-nkvjg" Jan 22 00:32:31 crc kubenswrapper[4829]: I0122 00:32:31.079803 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-nkvjg\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " pod="service-telemetry/stf-smoketest-smoke1-nkvjg" Jan 22 00:32:31 crc kubenswrapper[4829]: I0122 00:32:31.079874 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-collectd-config\") pod \"stf-smoketest-smoke1-nkvjg\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " pod="service-telemetry/stf-smoketest-smoke1-nkvjg" Jan 22 00:32:31 crc kubenswrapper[4829]: I0122 00:32:31.080191 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-sensubility-config\") pod 
\"stf-smoketest-smoke1-nkvjg\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " pod="service-telemetry/stf-smoketest-smoke1-nkvjg" Jan 22 00:32:31 crc kubenswrapper[4829]: I0122 00:32:31.081136 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-nkvjg\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " pod="service-telemetry/stf-smoketest-smoke1-nkvjg" Jan 22 00:32:31 crc kubenswrapper[4829]: I0122 00:32:31.081136 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-collectd-config\") pod \"stf-smoketest-smoke1-nkvjg\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " pod="service-telemetry/stf-smoketest-smoke1-nkvjg" Jan 22 00:32:31 crc kubenswrapper[4829]: I0122 00:32:31.081347 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-nkvjg\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " pod="service-telemetry/stf-smoketest-smoke1-nkvjg" Jan 22 00:32:31 crc kubenswrapper[4829]: I0122 00:32:31.081461 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-ceilometer-publisher\") pod \"stf-smoketest-smoke1-nkvjg\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " pod="service-telemetry/stf-smoketest-smoke1-nkvjg" Jan 22 00:32:31 crc kubenswrapper[4829]: I0122 00:32:31.081482 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-healthcheck-log\") pod \"stf-smoketest-smoke1-nkvjg\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " pod="service-telemetry/stf-smoketest-smoke1-nkvjg" Jan 22 00:32:31 crc kubenswrapper[4829]: I0122 00:32:31.082145 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-sensubility-config\") pod \"stf-smoketest-smoke1-nkvjg\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " pod="service-telemetry/stf-smoketest-smoke1-nkvjg" Jan 22 00:32:31 crc kubenswrapper[4829]: I0122 00:32:31.124724 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dgj9\" (UniqueName: \"kubernetes.io/projected/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-kube-api-access-6dgj9\") pod \"stf-smoketest-smoke1-nkvjg\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " pod="service-telemetry/stf-smoketest-smoke1-nkvjg" Jan 22 00:32:31 crc kubenswrapper[4829]: I0122 00:32:31.210513 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/curl"] Jan 22 00:32:31 crc kubenswrapper[4829]: I0122 00:32:31.211355 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/curl" Jan 22 00:32:31 crc kubenswrapper[4829]: I0122 00:32:31.218687 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/curl"] Jan 22 00:32:31 crc kubenswrapper[4829]: I0122 00:32:31.235391 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-nkvjg" Jan 22 00:32:31 crc kubenswrapper[4829]: I0122 00:32:31.287004 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wsmdr\" (UniqueName: \"kubernetes.io/projected/25c58ba2-18e2-4b11-998e-dc2e69b18b80-kube-api-access-wsmdr\") pod \"curl\" (UID: \"25c58ba2-18e2-4b11-998e-dc2e69b18b80\") " pod="service-telemetry/curl" Jan 22 00:32:31 crc kubenswrapper[4829]: I0122 00:32:31.388876 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wsmdr\" (UniqueName: \"kubernetes.io/projected/25c58ba2-18e2-4b11-998e-dc2e69b18b80-kube-api-access-wsmdr\") pod \"curl\" (UID: \"25c58ba2-18e2-4b11-998e-dc2e69b18b80\") " pod="service-telemetry/curl" Jan 22 00:32:31 crc kubenswrapper[4829]: I0122 00:32:31.414556 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wsmdr\" (UniqueName: \"kubernetes.io/projected/25c58ba2-18e2-4b11-998e-dc2e69b18b80-kube-api-access-wsmdr\") pod \"curl\" (UID: \"25c58ba2-18e2-4b11-998e-dc2e69b18b80\") " pod="service-telemetry/curl" Jan 22 00:32:31 crc kubenswrapper[4829]: I0122 00:32:31.463835 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-nkvjg"] Jan 22 00:32:31 crc kubenswrapper[4829]: W0122 00:32:31.475220 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddc82dca4_55c8_4dba_8b80_8c8934cb3bff.slice/crio-f67820938035ae56be4d093df67c6a626fe0e2639a87b876593045ca00673175 WatchSource:0}: Error finding container f67820938035ae56be4d093df67c6a626fe0e2639a87b876593045ca00673175: Status 404 returned error can't find the container with id f67820938035ae56be4d093df67c6a626fe0e2639a87b876593045ca00673175 Jan 22 00:32:31 crc kubenswrapper[4829]: I0122 00:32:31.537132 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/curl" Jan 22 00:32:31 crc kubenswrapper[4829]: I0122 00:32:31.553738 4829 scope.go:117] "RemoveContainer" containerID="89e5ac94644e7104d0691e872827eb502b5d008c6028b5e4855e05715119f846" Jan 22 00:32:31 crc kubenswrapper[4829]: I0122 00:32:31.720316 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6" event={"ID":"ed4e3e4a-9af6-470a-9f31-9bb2f31e166d","Type":"ContainerStarted","Data":"80201ebc76d18309b6d767ac1f0441f9726c3164aa6feb251bc893b8797885d5"} Jan 22 00:32:31 crc kubenswrapper[4829]: I0122 00:32:31.722563 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-nkvjg" event={"ID":"dc82dca4-55c8-4dba-8b80-8c8934cb3bff","Type":"ContainerStarted","Data":"f67820938035ae56be4d093df67c6a626fe0e2639a87b876593045ca00673175"} Jan 22 00:32:32 crc kubenswrapper[4829]: I0122 00:32:32.066776 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/curl"] Jan 22 00:32:32 crc kubenswrapper[4829]: I0122 00:32:32.559954 4829 scope.go:117] "RemoveContainer" containerID="b1ac8931a4c2075e2baab7a51399da69bef1bbddc82c6dd6b5fb9c7bb2f8dfd4" Jan 22 00:32:32 crc kubenswrapper[4829]: I0122 00:32:32.561097 4829 scope.go:117] "RemoveContainer" containerID="354ea289ddf4112700a72afd0e96212108b8cfac40da137cc717c0aeaa51a670" Jan 22 00:32:32 crc kubenswrapper[4829]: I0122 00:32:32.740465 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf" event={"ID":"e0451c51-c962-4cad-88e1-4e58c936f3c4","Type":"ContainerStarted","Data":"3d29855cb70b82cdd2d4749c3cb989af533d39e2444ff813d27b178b63b3637d"} Jan 22 00:32:32 crc kubenswrapper[4829]: I0122 00:32:32.741934 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" event={"ID":"25c58ba2-18e2-4b11-998e-dc2e69b18b80","Type":"ContainerStarted","Data":"895dbeffd9a1cc0edee7835bfc0b4ba2677a60dc81f4967a37441ed7e1889cf0"} Jan 22 00:32:33 crc kubenswrapper[4829]: I0122 00:32:33.553917 4829 scope.go:117] "RemoveContainer" containerID="5dca49ec52b6143d56fc1696cefc260476c07112f9b0d8994b1d01a95c59eaa9" Jan 22 00:32:33 crc kubenswrapper[4829]: I0122 00:32:33.752842 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-86889d79-n55rd" event={"ID":"975d0f01-de8e-402c-aa4b-f582673781e9","Type":"ContainerStarted","Data":"39f0167217ba51144091bfc46fffb5c7997f0a30e3362cfa61c90bc0146ae418"} Jan 22 00:32:33 crc kubenswrapper[4829]: I0122 00:32:33.757145 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-f964b986c-5ln55" event={"ID":"8adb7c3d-24d0-462c-a2bb-ff7533df28d6","Type":"ContainerStarted","Data":"ebe0959e562ebca14e137f908c278127fde5cc295e98fb2b86bf0702dfdcd5cf"} Jan 22 00:32:33 crc kubenswrapper[4829]: E0122 00:32:33.931371 4829 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod25c58ba2_18e2_4b11_998e_dc2e69b18b80.slice/crio-77dee341acd038eee9fe852663216b1c9ce1ceab279638f436f1d82cbfb89445.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod25c58ba2_18e2_4b11_998e_dc2e69b18b80.slice/crio-conmon-77dee341acd038eee9fe852663216b1c9ce1ceab279638f436f1d82cbfb89445.scope\": 
RecentStats: unable to find data in memory cache]" Jan 22 00:32:34 crc kubenswrapper[4829]: I0122 00:32:34.658387 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:32:34 crc kubenswrapper[4829]: I0122 00:32:34.658764 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:32:34 crc kubenswrapper[4829]: I0122 00:32:34.767379 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45" event={"ID":"6bff65e7-d317-42b0-92fd-79aa3d4a6d7a","Type":"ContainerStarted","Data":"d460bcda29a30a6634de52128b43d1de0c7a1f26e94c48f4c60db56f3c38b776"} Jan 22 00:32:34 crc kubenswrapper[4829]: I0122 00:32:34.768954 4829 generic.go:334] "Generic (PLEG): container finished" podID="25c58ba2-18e2-4b11-998e-dc2e69b18b80" containerID="77dee341acd038eee9fe852663216b1c9ce1ceab279638f436f1d82cbfb89445" exitCode=0 Jan 22 00:32:34 crc kubenswrapper[4829]: I0122 00:32:34.768990 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" event={"ID":"25c58ba2-18e2-4b11-998e-dc2e69b18b80","Type":"ContainerDied","Data":"77dee341acd038eee9fe852663216b1c9ce1ceab279638f436f1d82cbfb89445"} Jan 22 00:32:39 crc kubenswrapper[4829]: I0122 00:32:39.709622 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/curl" Jan 22 00:32:39 crc kubenswrapper[4829]: I0122 00:32:39.809656 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" event={"ID":"25c58ba2-18e2-4b11-998e-dc2e69b18b80","Type":"ContainerDied","Data":"895dbeffd9a1cc0edee7835bfc0b4ba2677a60dc81f4967a37441ed7e1889cf0"} Jan 22 00:32:39 crc kubenswrapper[4829]: I0122 00:32:39.810163 4829 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="895dbeffd9a1cc0edee7835bfc0b4ba2677a60dc81f4967a37441ed7e1889cf0" Jan 22 00:32:39 crc kubenswrapper[4829]: I0122 00:32:39.809727 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/curl" Jan 22 00:32:39 crc kubenswrapper[4829]: I0122 00:32:39.830923 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wsmdr\" (UniqueName: \"kubernetes.io/projected/25c58ba2-18e2-4b11-998e-dc2e69b18b80-kube-api-access-wsmdr\") pod \"25c58ba2-18e2-4b11-998e-dc2e69b18b80\" (UID: \"25c58ba2-18e2-4b11-998e-dc2e69b18b80\") " Jan 22 00:32:39 crc kubenswrapper[4829]: I0122 00:32:39.838772 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25c58ba2-18e2-4b11-998e-dc2e69b18b80-kube-api-access-wsmdr" (OuterVolumeSpecName: "kube-api-access-wsmdr") pod "25c58ba2-18e2-4b11-998e-dc2e69b18b80" (UID: "25c58ba2-18e2-4b11-998e-dc2e69b18b80"). InnerVolumeSpecName "kube-api-access-wsmdr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:32:39 crc kubenswrapper[4829]: I0122 00:32:39.862453 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_curl_25c58ba2-18e2-4b11-998e-dc2e69b18b80/curl/0.log" Jan 22 00:32:39 crc kubenswrapper[4829]: I0122 00:32:39.933315 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wsmdr\" (UniqueName: \"kubernetes.io/projected/25c58ba2-18e2-4b11-998e-dc2e69b18b80-kube-api-access-wsmdr\") on node \"crc\" DevicePath \"\"" Jan 22 00:32:40 crc kubenswrapper[4829]: I0122 00:32:40.100244 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-6856cfb745-qxqkv_956fb702-682a-4908-9ce8-4572084b0d4e/prometheus-webhook-snmp/0.log" Jan 22 00:32:44 crc kubenswrapper[4829]: E0122 00:32:44.185981 4829 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/tripleomastercentos9/openstack-collectd:current-tripleo" Jan 22 00:32:44 crc kubenswrapper[4829]: E0122 00:32:44.186710 4829 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:smoketest-collectd,Image:quay.io/tripleomastercentos9/openstack-collectd:current-tripleo,Command:[/smoketest_collectd_entrypoint.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CLOUDNAME,Value:smoke1,ValueFrom:nil,},EnvVar{Name:ELASTICSEARCH_AUTH_PASS,Value:JwjEs7NSWJiXBhJ1CzKHdGwJ,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_AUTH_TOKEN,Value:eyJhbGciOiJSUzI1NiIsImtpZCI6InF6SnFxNFFjbVk5VmJQZ2dNMmUxdHFmTlJlVWx4UDhSTlhIamV3RUx4WU0ifQ.eyJhdWQiOlsiaHR0cHM6Ly9rdWJlcm5ldGVzLmRlZmF1bHQuc3ZjIl0sImV4cCI6MTc2OTA0NTUzNSwiaWF0IjoxNzY5MDQxOTM1LCJpc3MiOiJodHRwczovL2t1YmVybmV0ZXMuZGVmYXVsdC5zdmMiLCJqdGkiOiI1OWU4OTFjOS0xMTdhLTRhMzgtOTIzOC0zNGJkYzE1YWJiMjQiLCJrdWJlcm5ldGVzLmlvIjp7Im5hbWVzcGFjZSI6InNlcnZpY2UtdGVsZW1ldHJ5Iiwic2VydmljZWFjY291bnQiOnsibmFtZSI6InN0Zi1wcm9tZXRoZXVzLXJlYWRlciIsInVpZCI6IjE2YjViMmMxLTdiYWUtNDA5NS1hOTVmLTQwN2M4N2M5YzI3YiJ9fSwibmJmIjoxNzY5MDQxOTM1LCJzdWIiOiJzeXN0ZW06c2VydmljZWFjY291bnQ6c2VydmljZS10ZWxlbWV0cnk6c3RmLXByb21ldGhldXMtcmVhZGVyIn0.Ij0HnM-xMV72UgZTfokcKqsGPpnbft0jZY0rvuhe6Xt4Miqnlo5rpD1TCdaDU-r9wSFw6qhcLyA9GnW0cpSXc333R-cqrnRw7HICbWPwM_lW2nS9HWTdRjnTwkBpumvXd2xWZXcdY-qxD6keMOaUmhkFCPhCH4TUOyf2HbzfzqOgW7IHeBB3XZl48x8NN3MXozPJ8P5S7IDAuzPwiQx__tUH4ONCyhvvsFIq1EdG6XfoTqN2aYKrYEH6ajUiM-jV4dCqdjvTlQk18q4QaND-YAbJ8yagOqmy0V-7BSzjwlY0oc2I2uyvMUt74Xov3DiGwKVqUZDug3ZWZ8FR7RoDCsiRCh6yMu3eHYXN97mQ2XBKSSAV67JXjs4-8n_KWHzyCKKt8lSCJNW_rZRse0LGr7cOzsQGcsHoQgP5hwEHjvWPJjMp7xGxM12wVDuw9DWWhlwN6tOq5geeP8kFRJPrqcSlttDhmTfZsdl1HtaiLoZ6T4j-Gfkuv-hiJQ7wyZdqIcZQYOcK2-yumMrqi6-_VLtHRcGfCLX4aPdN28WCXFrTmTBwiuUzZSwGZL4zikxrhHPlHgo9xlQymr9F3URYcxYhW88rhzEjptJTlZ_FtMXugmQZkdSp8q7Ow2nwzf4Vy0A1YNjeIryQGZ5ceQd2_SQETuzr0rdfAcjHkI4zoa8,ValueFrom:nil,},EnvVar{Name:OBSERVABILITY_STRATEGY,Value:<>,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:collectd-config,ReadOnly:false,MountPath:/etc/minimal-collectd.conf.template,SubPath:minimal-collectd.conf.template,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:sensubility-config,ReadOnly:false,MountPath:/etc/collectd-sensubility.conf,SubPath:collectd-sensubility.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:healthcheck-log,ReadOnly:false,MountPath:/healthcheck.log,SubPath:healthcheck.log,MountPropagat
ion:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:collectd-entrypoint-script,ReadOnly:false,MountPath:/smoketest_collectd_entrypoint.sh,SubPath:smoketest_collectd_entrypoint.sh,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6dgj9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod stf-smoketest-smoke1-nkvjg_service-telemetry(dc82dca4-55c8-4dba-8b80-8c8934cb3bff): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 22 00:32:49 crc kubenswrapper[4829]: E0122 00:32:49.495198 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"smoketest-collectd\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/stf-smoketest-smoke1-nkvjg" podUID="dc82dca4-55c8-4dba-8b80-8c8934cb3bff" Jan 22 00:32:49 crc kubenswrapper[4829]: I0122 00:32:49.876622 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-nkvjg" event={"ID":"dc82dca4-55c8-4dba-8b80-8c8934cb3bff","Type":"ContainerStarted","Data":"d264c90c4aab44da6651d404cff0980dadb520a58d66d0fb021e595bc1c6c254"} Jan 22 00:32:49 crc kubenswrapper[4829]: E0122 00:32:49.878766 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"smoketest-collectd\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/tripleomastercentos9/openstack-collectd:current-tripleo\\\"\"" pod="service-telemetry/stf-smoketest-smoke1-nkvjg" podUID="dc82dca4-55c8-4dba-8b80-8c8934cb3bff" Jan 22 00:32:50 crc kubenswrapper[4829]: E0122 00:32:50.886335 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"smoketest-collectd\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/tripleomastercentos9/openstack-collectd:current-tripleo\\\"\"" pod="service-telemetry/stf-smoketest-smoke1-nkvjg" podUID="dc82dca4-55c8-4dba-8b80-8c8934cb3bff" Jan 22 00:32:53 crc kubenswrapper[4829]: I0122 00:32:53.165494 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-cb6s5"] Jan 22 00:32:53 crc kubenswrapper[4829]: E0122 00:32:53.166899 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25c58ba2-18e2-4b11-998e-dc2e69b18b80" containerName="curl" Jan 22 00:32:53 crc kubenswrapper[4829]: I0122 00:32:53.166988 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="25c58ba2-18e2-4b11-998e-dc2e69b18b80" containerName="curl" Jan 22 00:32:53 crc kubenswrapper[4829]: I0122 00:32:53.167212 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="25c58ba2-18e2-4b11-998e-dc2e69b18b80" containerName="curl" Jan 22 00:32:53 crc kubenswrapper[4829]: I0122 00:32:53.168396 
4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cb6s5" Jan 22 00:32:53 crc kubenswrapper[4829]: I0122 00:32:53.182810 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cb6s5"] Jan 22 00:32:53 crc kubenswrapper[4829]: I0122 00:32:53.260847 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6n6d\" (UniqueName: \"kubernetes.io/projected/0d683d8b-71e6-4a7b-a2de-0bc30625d346-kube-api-access-m6n6d\") pod \"certified-operators-cb6s5\" (UID: \"0d683d8b-71e6-4a7b-a2de-0bc30625d346\") " pod="openshift-marketplace/certified-operators-cb6s5" Jan 22 00:32:53 crc kubenswrapper[4829]: I0122 00:32:53.261009 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d683d8b-71e6-4a7b-a2de-0bc30625d346-catalog-content\") pod \"certified-operators-cb6s5\" (UID: \"0d683d8b-71e6-4a7b-a2de-0bc30625d346\") " pod="openshift-marketplace/certified-operators-cb6s5" Jan 22 00:32:53 crc kubenswrapper[4829]: I0122 00:32:53.261062 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d683d8b-71e6-4a7b-a2de-0bc30625d346-utilities\") pod \"certified-operators-cb6s5\" (UID: \"0d683d8b-71e6-4a7b-a2de-0bc30625d346\") " pod="openshift-marketplace/certified-operators-cb6s5" Jan 22 00:32:53 crc kubenswrapper[4829]: I0122 00:32:53.362485 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d683d8b-71e6-4a7b-a2de-0bc30625d346-catalog-content\") pod \"certified-operators-cb6s5\" (UID: \"0d683d8b-71e6-4a7b-a2de-0bc30625d346\") " pod="openshift-marketplace/certified-operators-cb6s5" Jan 22 00:32:53 crc kubenswrapper[4829]: I0122 00:32:53.362551 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d683d8b-71e6-4a7b-a2de-0bc30625d346-utilities\") pod \"certified-operators-cb6s5\" (UID: \"0d683d8b-71e6-4a7b-a2de-0bc30625d346\") " pod="openshift-marketplace/certified-operators-cb6s5" Jan 22 00:32:53 crc kubenswrapper[4829]: I0122 00:32:53.362622 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6n6d\" (UniqueName: \"kubernetes.io/projected/0d683d8b-71e6-4a7b-a2de-0bc30625d346-kube-api-access-m6n6d\") pod \"certified-operators-cb6s5\" (UID: \"0d683d8b-71e6-4a7b-a2de-0bc30625d346\") " pod="openshift-marketplace/certified-operators-cb6s5" Jan 22 00:32:53 crc kubenswrapper[4829]: I0122 00:32:53.363518 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d683d8b-71e6-4a7b-a2de-0bc30625d346-catalog-content\") pod \"certified-operators-cb6s5\" (UID: \"0d683d8b-71e6-4a7b-a2de-0bc30625d346\") " pod="openshift-marketplace/certified-operators-cb6s5" Jan 22 00:32:53 crc kubenswrapper[4829]: I0122 00:32:53.363991 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d683d8b-71e6-4a7b-a2de-0bc30625d346-utilities\") pod \"certified-operators-cb6s5\" (UID: \"0d683d8b-71e6-4a7b-a2de-0bc30625d346\") " pod="openshift-marketplace/certified-operators-cb6s5" Jan 22 00:32:53 crc kubenswrapper[4829]: I0122 
00:32:53.382129 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6n6d\" (UniqueName: \"kubernetes.io/projected/0d683d8b-71e6-4a7b-a2de-0bc30625d346-kube-api-access-m6n6d\") pod \"certified-operators-cb6s5\" (UID: \"0d683d8b-71e6-4a7b-a2de-0bc30625d346\") " pod="openshift-marketplace/certified-operators-cb6s5" Jan 22 00:32:53 crc kubenswrapper[4829]: I0122 00:32:53.490940 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cb6s5" Jan 22 00:32:53 crc kubenswrapper[4829]: I0122 00:32:53.737991 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cb6s5"] Jan 22 00:32:53 crc kubenswrapper[4829]: I0122 00:32:53.911406 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cb6s5" event={"ID":"0d683d8b-71e6-4a7b-a2de-0bc30625d346","Type":"ContainerStarted","Data":"e15622cd134cb1c422ec56239773776868ff07bedf72ee00ffbddc427ca713d0"} Jan 22 00:32:54 crc kubenswrapper[4829]: I0122 00:32:54.920532 4829 generic.go:334] "Generic (PLEG): container finished" podID="0d683d8b-71e6-4a7b-a2de-0bc30625d346" containerID="8edadcccb886b39de7fe611ecda1c624c1969045c536363f266985536598d5c8" exitCode=0 Jan 22 00:32:54 crc kubenswrapper[4829]: I0122 00:32:54.920616 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cb6s5" event={"ID":"0d683d8b-71e6-4a7b-a2de-0bc30625d346","Type":"ContainerDied","Data":"8edadcccb886b39de7fe611ecda1c624c1969045c536363f266985536598d5c8"} Jan 22 00:32:56 crc kubenswrapper[4829]: I0122 00:32:56.942434 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cb6s5" event={"ID":"0d683d8b-71e6-4a7b-a2de-0bc30625d346","Type":"ContainerStarted","Data":"c12835ed3db96bf8f6ceed9ad8ea5b0069e2aa85f897cdc477f98c58776539b5"} Jan 22 00:32:57 crc kubenswrapper[4829]: I0122 00:32:57.951584 4829 generic.go:334] "Generic (PLEG): container finished" podID="0d683d8b-71e6-4a7b-a2de-0bc30625d346" containerID="c12835ed3db96bf8f6ceed9ad8ea5b0069e2aa85f897cdc477f98c58776539b5" exitCode=0 Jan 22 00:32:57 crc kubenswrapper[4829]: I0122 00:32:57.951639 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cb6s5" event={"ID":"0d683d8b-71e6-4a7b-a2de-0bc30625d346","Type":"ContainerDied","Data":"c12835ed3db96bf8f6ceed9ad8ea5b0069e2aa85f897cdc477f98c58776539b5"} Jan 22 00:32:58 crc kubenswrapper[4829]: I0122 00:32:58.961414 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cb6s5" event={"ID":"0d683d8b-71e6-4a7b-a2de-0bc30625d346","Type":"ContainerStarted","Data":"53f427f7659611c2d7cea0ee32c4258ad07f91f9469ced99dba9219dd541f1cb"} Jan 22 00:32:58 crc kubenswrapper[4829]: I0122 00:32:58.982681 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-cb6s5" podStartSLOduration=2.5863070219999997 podStartE2EDuration="5.982662957s" podCreationTimestamp="2026-01-22 00:32:53 +0000 UTC" firstStartedPulling="2026-01-22 00:32:54.923175185 +0000 UTC m=+1552.959417107" lastFinishedPulling="2026-01-22 00:32:58.31953113 +0000 UTC m=+1556.355773042" observedRunningTime="2026-01-22 00:32:58.979385285 +0000 UTC m=+1557.015627197" watchObservedRunningTime="2026-01-22 00:32:58.982662957 +0000 UTC m=+1557.018904869" Jan 22 00:33:03 crc kubenswrapper[4829]: I0122 00:33:03.491937 4829 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-cb6s5" Jan 22 00:33:03 crc kubenswrapper[4829]: I0122 00:33:03.492903 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-cb6s5" Jan 22 00:33:03 crc kubenswrapper[4829]: I0122 00:33:03.555866 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-cb6s5" Jan 22 00:33:04 crc kubenswrapper[4829]: I0122 00:33:04.059682 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-cb6s5" Jan 22 00:33:04 crc kubenswrapper[4829]: I0122 00:33:04.115053 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cb6s5"] Jan 22 00:33:04 crc kubenswrapper[4829]: I0122 00:33:04.658130 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:33:04 crc kubenswrapper[4829]: I0122 00:33:04.659613 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:33:04 crc kubenswrapper[4829]: I0122 00:33:04.659895 4829 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 00:33:04 crc kubenswrapper[4829]: I0122 00:33:04.661215 4829 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061"} pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 00:33:04 crc kubenswrapper[4829]: I0122 00:33:04.661503 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" containerID="cri-o://eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" gracePeriod=600 Jan 22 00:33:06 crc kubenswrapper[4829]: I0122 00:33:06.022873 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-cb6s5" podUID="0d683d8b-71e6-4a7b-a2de-0bc30625d346" containerName="registry-server" containerID="cri-o://53f427f7659611c2d7cea0ee32c4258ad07f91f9469ced99dba9219dd541f1cb" gracePeriod=2 Jan 22 00:33:07 crc kubenswrapper[4829]: I0122 00:33:07.031041 4829 generic.go:334] "Generic (PLEG): container finished" podID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerID="eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" exitCode=0 Jan 22 00:33:07 crc kubenswrapper[4829]: I0122 00:33:07.031087 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" 
event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerDied","Data":"eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061"} Jan 22 00:33:07 crc kubenswrapper[4829]: I0122 00:33:07.031125 4829 scope.go:117] "RemoveContainer" containerID="dcae494e6dd96d020976f8bde0ed9bba1101db5e0723ced80c6af5f0103c9228" Jan 22 00:33:07 crc kubenswrapper[4829]: E0122 00:33:07.163581 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:33:07 crc kubenswrapper[4829]: I0122 00:33:07.714054 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cb6s5" Jan 22 00:33:07 crc kubenswrapper[4829]: I0122 00:33:07.880226 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d683d8b-71e6-4a7b-a2de-0bc30625d346-catalog-content\") pod \"0d683d8b-71e6-4a7b-a2de-0bc30625d346\" (UID: \"0d683d8b-71e6-4a7b-a2de-0bc30625d346\") " Jan 22 00:33:07 crc kubenswrapper[4829]: I0122 00:33:07.880997 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m6n6d\" (UniqueName: \"kubernetes.io/projected/0d683d8b-71e6-4a7b-a2de-0bc30625d346-kube-api-access-m6n6d\") pod \"0d683d8b-71e6-4a7b-a2de-0bc30625d346\" (UID: \"0d683d8b-71e6-4a7b-a2de-0bc30625d346\") " Jan 22 00:33:07 crc kubenswrapper[4829]: I0122 00:33:07.881193 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d683d8b-71e6-4a7b-a2de-0bc30625d346-utilities\") pod \"0d683d8b-71e6-4a7b-a2de-0bc30625d346\" (UID: \"0d683d8b-71e6-4a7b-a2de-0bc30625d346\") " Jan 22 00:33:07 crc kubenswrapper[4829]: I0122 00:33:07.882781 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d683d8b-71e6-4a7b-a2de-0bc30625d346-utilities" (OuterVolumeSpecName: "utilities") pod "0d683d8b-71e6-4a7b-a2de-0bc30625d346" (UID: "0d683d8b-71e6-4a7b-a2de-0bc30625d346"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:33:07 crc kubenswrapper[4829]: I0122 00:33:07.883408 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d683d8b-71e6-4a7b-a2de-0bc30625d346-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 00:33:07 crc kubenswrapper[4829]: I0122 00:33:07.886915 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d683d8b-71e6-4a7b-a2de-0bc30625d346-kube-api-access-m6n6d" (OuterVolumeSpecName: "kube-api-access-m6n6d") pod "0d683d8b-71e6-4a7b-a2de-0bc30625d346" (UID: "0d683d8b-71e6-4a7b-a2de-0bc30625d346"). InnerVolumeSpecName "kube-api-access-m6n6d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:33:07 crc kubenswrapper[4829]: I0122 00:33:07.940741 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d683d8b-71e6-4a7b-a2de-0bc30625d346-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0d683d8b-71e6-4a7b-a2de-0bc30625d346" (UID: "0d683d8b-71e6-4a7b-a2de-0bc30625d346"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:33:07 crc kubenswrapper[4829]: I0122 00:33:07.984635 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m6n6d\" (UniqueName: \"kubernetes.io/projected/0d683d8b-71e6-4a7b-a2de-0bc30625d346-kube-api-access-m6n6d\") on node \"crc\" DevicePath \"\"" Jan 22 00:33:07 crc kubenswrapper[4829]: I0122 00:33:07.984686 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d683d8b-71e6-4a7b-a2de-0bc30625d346-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 00:33:08 crc kubenswrapper[4829]: I0122 00:33:08.041027 4829 generic.go:334] "Generic (PLEG): container finished" podID="0d683d8b-71e6-4a7b-a2de-0bc30625d346" containerID="53f427f7659611c2d7cea0ee32c4258ad07f91f9469ced99dba9219dd541f1cb" exitCode=0 Jan 22 00:33:08 crc kubenswrapper[4829]: I0122 00:33:08.041119 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cb6s5" event={"ID":"0d683d8b-71e6-4a7b-a2de-0bc30625d346","Type":"ContainerDied","Data":"53f427f7659611c2d7cea0ee32c4258ad07f91f9469ced99dba9219dd541f1cb"} Jan 22 00:33:08 crc kubenswrapper[4829]: I0122 00:33:08.041161 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cb6s5" event={"ID":"0d683d8b-71e6-4a7b-a2de-0bc30625d346","Type":"ContainerDied","Data":"e15622cd134cb1c422ec56239773776868ff07bedf72ee00ffbddc427ca713d0"} Jan 22 00:33:08 crc kubenswrapper[4829]: I0122 00:33:08.041188 4829 scope.go:117] "RemoveContainer" containerID="53f427f7659611c2d7cea0ee32c4258ad07f91f9469ced99dba9219dd541f1cb" Jan 22 00:33:08 crc kubenswrapper[4829]: I0122 00:33:08.041363 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cb6s5" Jan 22 00:33:08 crc kubenswrapper[4829]: I0122 00:33:08.051344 4829 scope.go:117] "RemoveContainer" containerID="eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" Jan 22 00:33:08 crc kubenswrapper[4829]: E0122 00:33:08.051807 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:33:08 crc kubenswrapper[4829]: I0122 00:33:08.059656 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-nkvjg" event={"ID":"dc82dca4-55c8-4dba-8b80-8c8934cb3bff","Type":"ContainerStarted","Data":"1421dbcd4f3af85a8ca0d3ef59350c43cd03fc2aa22e9e778c130070f696189f"} Jan 22 00:33:08 crc kubenswrapper[4829]: I0122 00:33:08.067020 4829 scope.go:117] "RemoveContainer" containerID="c12835ed3db96bf8f6ceed9ad8ea5b0069e2aa85f897cdc477f98c58776539b5" Jan 22 00:33:08 crc kubenswrapper[4829]: I0122 00:33:08.104229 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/stf-smoketest-smoke1-nkvjg" podStartSLOduration=2.371399963 podStartE2EDuration="38.104208945s" podCreationTimestamp="2026-01-22 00:32:30 +0000 UTC" firstStartedPulling="2026-01-22 00:32:31.477503152 +0000 UTC m=+1529.513745064" lastFinishedPulling="2026-01-22 00:33:07.210312124 +0000 UTC m=+1565.246554046" observedRunningTime="2026-01-22 00:33:08.099906192 +0000 UTC m=+1566.136148104" watchObservedRunningTime="2026-01-22 00:33:08.104208945 +0000 UTC m=+1566.140450857" Jan 22 00:33:08 crc kubenswrapper[4829]: I0122 00:33:08.123597 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cb6s5"] Jan 22 00:33:08 crc kubenswrapper[4829]: I0122 00:33:08.123907 4829 scope.go:117] "RemoveContainer" containerID="8edadcccb886b39de7fe611ecda1c624c1969045c536363f266985536598d5c8" Jan 22 00:33:08 crc kubenswrapper[4829]: I0122 00:33:08.134625 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-cb6s5"] Jan 22 00:33:08 crc kubenswrapper[4829]: I0122 00:33:08.143829 4829 scope.go:117] "RemoveContainer" containerID="53f427f7659611c2d7cea0ee32c4258ad07f91f9469ced99dba9219dd541f1cb" Jan 22 00:33:08 crc kubenswrapper[4829]: E0122 00:33:08.144315 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53f427f7659611c2d7cea0ee32c4258ad07f91f9469ced99dba9219dd541f1cb\": container with ID starting with 53f427f7659611c2d7cea0ee32c4258ad07f91f9469ced99dba9219dd541f1cb not found: ID does not exist" containerID="53f427f7659611c2d7cea0ee32c4258ad07f91f9469ced99dba9219dd541f1cb" Jan 22 00:33:08 crc kubenswrapper[4829]: I0122 00:33:08.144368 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53f427f7659611c2d7cea0ee32c4258ad07f91f9469ced99dba9219dd541f1cb"} err="failed to get container status \"53f427f7659611c2d7cea0ee32c4258ad07f91f9469ced99dba9219dd541f1cb\": rpc error: code = NotFound desc = could not find container \"53f427f7659611c2d7cea0ee32c4258ad07f91f9469ced99dba9219dd541f1cb\": container with ID starting with 
53f427f7659611c2d7cea0ee32c4258ad07f91f9469ced99dba9219dd541f1cb not found: ID does not exist" Jan 22 00:33:08 crc kubenswrapper[4829]: I0122 00:33:08.144400 4829 scope.go:117] "RemoveContainer" containerID="c12835ed3db96bf8f6ceed9ad8ea5b0069e2aa85f897cdc477f98c58776539b5" Jan 22 00:33:08 crc kubenswrapper[4829]: E0122 00:33:08.144769 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c12835ed3db96bf8f6ceed9ad8ea5b0069e2aa85f897cdc477f98c58776539b5\": container with ID starting with c12835ed3db96bf8f6ceed9ad8ea5b0069e2aa85f897cdc477f98c58776539b5 not found: ID does not exist" containerID="c12835ed3db96bf8f6ceed9ad8ea5b0069e2aa85f897cdc477f98c58776539b5" Jan 22 00:33:08 crc kubenswrapper[4829]: I0122 00:33:08.144802 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c12835ed3db96bf8f6ceed9ad8ea5b0069e2aa85f897cdc477f98c58776539b5"} err="failed to get container status \"c12835ed3db96bf8f6ceed9ad8ea5b0069e2aa85f897cdc477f98c58776539b5\": rpc error: code = NotFound desc = could not find container \"c12835ed3db96bf8f6ceed9ad8ea5b0069e2aa85f897cdc477f98c58776539b5\": container with ID starting with c12835ed3db96bf8f6ceed9ad8ea5b0069e2aa85f897cdc477f98c58776539b5 not found: ID does not exist" Jan 22 00:33:08 crc kubenswrapper[4829]: I0122 00:33:08.144825 4829 scope.go:117] "RemoveContainer" containerID="8edadcccb886b39de7fe611ecda1c624c1969045c536363f266985536598d5c8" Jan 22 00:33:08 crc kubenswrapper[4829]: E0122 00:33:08.145049 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8edadcccb886b39de7fe611ecda1c624c1969045c536363f266985536598d5c8\": container with ID starting with 8edadcccb886b39de7fe611ecda1c624c1969045c536363f266985536598d5c8 not found: ID does not exist" containerID="8edadcccb886b39de7fe611ecda1c624c1969045c536363f266985536598d5c8" Jan 22 00:33:08 crc kubenswrapper[4829]: I0122 00:33:08.145066 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8edadcccb886b39de7fe611ecda1c624c1969045c536363f266985536598d5c8"} err="failed to get container status \"8edadcccb886b39de7fe611ecda1c624c1969045c536363f266985536598d5c8\": rpc error: code = NotFound desc = could not find container \"8edadcccb886b39de7fe611ecda1c624c1969045c536363f266985536598d5c8\": container with ID starting with 8edadcccb886b39de7fe611ecda1c624c1969045c536363f266985536598d5c8 not found: ID does not exist" Jan 22 00:33:08 crc kubenswrapper[4829]: I0122 00:33:08.563803 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d683d8b-71e6-4a7b-a2de-0bc30625d346" path="/var/lib/kubelet/pods/0d683d8b-71e6-4a7b-a2de-0bc30625d346/volumes" Jan 22 00:33:10 crc kubenswrapper[4829]: I0122 00:33:10.242394 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-6856cfb745-qxqkv_956fb702-682a-4908-9ce8-4572084b0d4e/prometheus-webhook-snmp/0.log" Jan 22 00:33:13 crc kubenswrapper[4829]: I0122 00:33:13.277291 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jbn5g"] Jan 22 00:33:13 crc kubenswrapper[4829]: E0122 00:33:13.277727 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d683d8b-71e6-4a7b-a2de-0bc30625d346" containerName="extract-content" Jan 22 00:33:13 crc kubenswrapper[4829]: I0122 00:33:13.277742 4829 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="0d683d8b-71e6-4a7b-a2de-0bc30625d346" containerName="extract-content" Jan 22 00:33:13 crc kubenswrapper[4829]: E0122 00:33:13.277761 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d683d8b-71e6-4a7b-a2de-0bc30625d346" containerName="extract-utilities" Jan 22 00:33:13 crc kubenswrapper[4829]: I0122 00:33:13.277768 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d683d8b-71e6-4a7b-a2de-0bc30625d346" containerName="extract-utilities" Jan 22 00:33:13 crc kubenswrapper[4829]: E0122 00:33:13.277787 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d683d8b-71e6-4a7b-a2de-0bc30625d346" containerName="registry-server" Jan 22 00:33:13 crc kubenswrapper[4829]: I0122 00:33:13.277795 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d683d8b-71e6-4a7b-a2de-0bc30625d346" containerName="registry-server" Jan 22 00:33:13 crc kubenswrapper[4829]: I0122 00:33:13.277945 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d683d8b-71e6-4a7b-a2de-0bc30625d346" containerName="registry-server" Jan 22 00:33:13 crc kubenswrapper[4829]: I0122 00:33:13.279163 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jbn5g" Jan 22 00:33:13 crc kubenswrapper[4829]: I0122 00:33:13.289678 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jbn5g"] Jan 22 00:33:13 crc kubenswrapper[4829]: I0122 00:33:13.470505 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45hk2\" (UniqueName: \"kubernetes.io/projected/d50dda23-abf3-4274-ba20-e9005d642293-kube-api-access-45hk2\") pod \"community-operators-jbn5g\" (UID: \"d50dda23-abf3-4274-ba20-e9005d642293\") " pod="openshift-marketplace/community-operators-jbn5g" Jan 22 00:33:13 crc kubenswrapper[4829]: I0122 00:33:13.470592 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d50dda23-abf3-4274-ba20-e9005d642293-utilities\") pod \"community-operators-jbn5g\" (UID: \"d50dda23-abf3-4274-ba20-e9005d642293\") " pod="openshift-marketplace/community-operators-jbn5g" Jan 22 00:33:13 crc kubenswrapper[4829]: I0122 00:33:13.470654 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d50dda23-abf3-4274-ba20-e9005d642293-catalog-content\") pod \"community-operators-jbn5g\" (UID: \"d50dda23-abf3-4274-ba20-e9005d642293\") " pod="openshift-marketplace/community-operators-jbn5g" Jan 22 00:33:13 crc kubenswrapper[4829]: I0122 00:33:13.572567 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45hk2\" (UniqueName: \"kubernetes.io/projected/d50dda23-abf3-4274-ba20-e9005d642293-kube-api-access-45hk2\") pod \"community-operators-jbn5g\" (UID: \"d50dda23-abf3-4274-ba20-e9005d642293\") " pod="openshift-marketplace/community-operators-jbn5g" Jan 22 00:33:13 crc kubenswrapper[4829]: I0122 00:33:13.572857 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d50dda23-abf3-4274-ba20-e9005d642293-utilities\") pod \"community-operators-jbn5g\" (UID: \"d50dda23-abf3-4274-ba20-e9005d642293\") " pod="openshift-marketplace/community-operators-jbn5g" Jan 22 00:33:13 crc kubenswrapper[4829]: I0122 
00:33:13.573011 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d50dda23-abf3-4274-ba20-e9005d642293-catalog-content\") pod \"community-operators-jbn5g\" (UID: \"d50dda23-abf3-4274-ba20-e9005d642293\") " pod="openshift-marketplace/community-operators-jbn5g" Jan 22 00:33:13 crc kubenswrapper[4829]: I0122 00:33:13.573348 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d50dda23-abf3-4274-ba20-e9005d642293-utilities\") pod \"community-operators-jbn5g\" (UID: \"d50dda23-abf3-4274-ba20-e9005d642293\") " pod="openshift-marketplace/community-operators-jbn5g" Jan 22 00:33:13 crc kubenswrapper[4829]: I0122 00:33:13.573457 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d50dda23-abf3-4274-ba20-e9005d642293-catalog-content\") pod \"community-operators-jbn5g\" (UID: \"d50dda23-abf3-4274-ba20-e9005d642293\") " pod="openshift-marketplace/community-operators-jbn5g" Jan 22 00:33:13 crc kubenswrapper[4829]: I0122 00:33:13.596571 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45hk2\" (UniqueName: \"kubernetes.io/projected/d50dda23-abf3-4274-ba20-e9005d642293-kube-api-access-45hk2\") pod \"community-operators-jbn5g\" (UID: \"d50dda23-abf3-4274-ba20-e9005d642293\") " pod="openshift-marketplace/community-operators-jbn5g" Jan 22 00:33:13 crc kubenswrapper[4829]: I0122 00:33:13.598434 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jbn5g" Jan 22 00:33:14 crc kubenswrapper[4829]: I0122 00:33:14.071979 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jbn5g"] Jan 22 00:33:14 crc kubenswrapper[4829]: I0122 00:33:14.116020 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jbn5g" event={"ID":"d50dda23-abf3-4274-ba20-e9005d642293","Type":"ContainerStarted","Data":"3183c61c424d48442002d8431c075283f123e8d7729fc24ad38bdc08ba4052d8"} Jan 22 00:33:15 crc kubenswrapper[4829]: I0122 00:33:15.126623 4829 generic.go:334] "Generic (PLEG): container finished" podID="d50dda23-abf3-4274-ba20-e9005d642293" containerID="0309b67f66932b43e99149ed2a795d8eac096d73e4ac105449b63269ddd61579" exitCode=0 Jan 22 00:33:15 crc kubenswrapper[4829]: I0122 00:33:15.126682 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jbn5g" event={"ID":"d50dda23-abf3-4274-ba20-e9005d642293","Type":"ContainerDied","Data":"0309b67f66932b43e99149ed2a795d8eac096d73e4ac105449b63269ddd61579"} Jan 22 00:33:16 crc kubenswrapper[4829]: I0122 00:33:16.137404 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jbn5g" event={"ID":"d50dda23-abf3-4274-ba20-e9005d642293","Type":"ContainerStarted","Data":"e5e91a39c748a2bba35c13e63f51502c5569b772bf378acbfe91afbb8497bb35"} Jan 22 00:33:17 crc kubenswrapper[4829]: I0122 00:33:17.149637 4829 generic.go:334] "Generic (PLEG): container finished" podID="d50dda23-abf3-4274-ba20-e9005d642293" containerID="e5e91a39c748a2bba35c13e63f51502c5569b772bf378acbfe91afbb8497bb35" exitCode=0 Jan 22 00:33:17 crc kubenswrapper[4829]: I0122 00:33:17.149681 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jbn5g" 
event={"ID":"d50dda23-abf3-4274-ba20-e9005d642293","Type":"ContainerDied","Data":"e5e91a39c748a2bba35c13e63f51502c5569b772bf378acbfe91afbb8497bb35"} Jan 22 00:33:18 crc kubenswrapper[4829]: I0122 00:33:18.159774 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jbn5g" event={"ID":"d50dda23-abf3-4274-ba20-e9005d642293","Type":"ContainerStarted","Data":"c236099e2a2f0f84e968596b64da726f0ac2d80de6ea1353d3f9b26854a34971"} Jan 22 00:33:19 crc kubenswrapper[4829]: I0122 00:33:19.553580 4829 scope.go:117] "RemoveContainer" containerID="eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" Jan 22 00:33:19 crc kubenswrapper[4829]: E0122 00:33:19.554022 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:33:20 crc kubenswrapper[4829]: I0122 00:33:20.670226 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jbn5g" podStartSLOduration=5.263806947 podStartE2EDuration="7.670206067s" podCreationTimestamp="2026-01-22 00:33:13 +0000 UTC" firstStartedPulling="2026-01-22 00:33:15.128822983 +0000 UTC m=+1573.165064925" lastFinishedPulling="2026-01-22 00:33:17.535222133 +0000 UTC m=+1575.571464045" observedRunningTime="2026-01-22 00:33:18.202522059 +0000 UTC m=+1576.238764021" watchObservedRunningTime="2026-01-22 00:33:20.670206067 +0000 UTC m=+1578.706447979" Jan 22 00:33:20 crc kubenswrapper[4829]: I0122 00:33:20.672933 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6nc85"] Jan 22 00:33:20 crc kubenswrapper[4829]: I0122 00:33:20.674219 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6nc85" Jan 22 00:33:20 crc kubenswrapper[4829]: I0122 00:33:20.698223 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6nc85"] Jan 22 00:33:20 crc kubenswrapper[4829]: I0122 00:33:20.799773 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g54d7\" (UniqueName: \"kubernetes.io/projected/1714cbc8-7d0f-4743-8547-caabb6fb7181-kube-api-access-g54d7\") pod \"redhat-operators-6nc85\" (UID: \"1714cbc8-7d0f-4743-8547-caabb6fb7181\") " pod="openshift-marketplace/redhat-operators-6nc85" Jan 22 00:33:20 crc kubenswrapper[4829]: I0122 00:33:20.799830 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1714cbc8-7d0f-4743-8547-caabb6fb7181-catalog-content\") pod \"redhat-operators-6nc85\" (UID: \"1714cbc8-7d0f-4743-8547-caabb6fb7181\") " pod="openshift-marketplace/redhat-operators-6nc85" Jan 22 00:33:20 crc kubenswrapper[4829]: I0122 00:33:20.799863 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1714cbc8-7d0f-4743-8547-caabb6fb7181-utilities\") pod \"redhat-operators-6nc85\" (UID: \"1714cbc8-7d0f-4743-8547-caabb6fb7181\") " pod="openshift-marketplace/redhat-operators-6nc85" Jan 22 00:33:20 crc kubenswrapper[4829]: I0122 00:33:20.901098 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g54d7\" (UniqueName: \"kubernetes.io/projected/1714cbc8-7d0f-4743-8547-caabb6fb7181-kube-api-access-g54d7\") pod \"redhat-operators-6nc85\" (UID: \"1714cbc8-7d0f-4743-8547-caabb6fb7181\") " pod="openshift-marketplace/redhat-operators-6nc85" Jan 22 00:33:20 crc kubenswrapper[4829]: I0122 00:33:20.901148 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1714cbc8-7d0f-4743-8547-caabb6fb7181-catalog-content\") pod \"redhat-operators-6nc85\" (UID: \"1714cbc8-7d0f-4743-8547-caabb6fb7181\") " pod="openshift-marketplace/redhat-operators-6nc85" Jan 22 00:33:20 crc kubenswrapper[4829]: I0122 00:33:20.901183 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1714cbc8-7d0f-4743-8547-caabb6fb7181-utilities\") pod \"redhat-operators-6nc85\" (UID: \"1714cbc8-7d0f-4743-8547-caabb6fb7181\") " pod="openshift-marketplace/redhat-operators-6nc85" Jan 22 00:33:20 crc kubenswrapper[4829]: I0122 00:33:20.901663 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1714cbc8-7d0f-4743-8547-caabb6fb7181-utilities\") pod \"redhat-operators-6nc85\" (UID: \"1714cbc8-7d0f-4743-8547-caabb6fb7181\") " pod="openshift-marketplace/redhat-operators-6nc85" Jan 22 00:33:20 crc kubenswrapper[4829]: I0122 00:33:20.901747 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1714cbc8-7d0f-4743-8547-caabb6fb7181-catalog-content\") pod \"redhat-operators-6nc85\" (UID: \"1714cbc8-7d0f-4743-8547-caabb6fb7181\") " pod="openshift-marketplace/redhat-operators-6nc85" Jan 22 00:33:20 crc kubenswrapper[4829]: I0122 00:33:20.924466 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-g54d7\" (UniqueName: \"kubernetes.io/projected/1714cbc8-7d0f-4743-8547-caabb6fb7181-kube-api-access-g54d7\") pod \"redhat-operators-6nc85\" (UID: \"1714cbc8-7d0f-4743-8547-caabb6fb7181\") " pod="openshift-marketplace/redhat-operators-6nc85" Jan 22 00:33:20 crc kubenswrapper[4829]: I0122 00:33:20.995428 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6nc85" Jan 22 00:33:21 crc kubenswrapper[4829]: I0122 00:33:21.187314 4829 generic.go:334] "Generic (PLEG): container finished" podID="dc82dca4-55c8-4dba-8b80-8c8934cb3bff" containerID="d264c90c4aab44da6651d404cff0980dadb520a58d66d0fb021e595bc1c6c254" exitCode=0 Jan 22 00:33:21 crc kubenswrapper[4829]: I0122 00:33:21.187683 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-nkvjg" event={"ID":"dc82dca4-55c8-4dba-8b80-8c8934cb3bff","Type":"ContainerDied","Data":"d264c90c4aab44da6651d404cff0980dadb520a58d66d0fb021e595bc1c6c254"} Jan 22 00:33:21 crc kubenswrapper[4829]: I0122 00:33:21.188301 4829 scope.go:117] "RemoveContainer" containerID="d264c90c4aab44da6651d404cff0980dadb520a58d66d0fb021e595bc1c6c254" Jan 22 00:33:21 crc kubenswrapper[4829]: I0122 00:33:21.481031 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6nc85"] Jan 22 00:33:21 crc kubenswrapper[4829]: W0122 00:33:21.489152 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1714cbc8_7d0f_4743_8547_caabb6fb7181.slice/crio-587471561cfd91586cc408760e22dcec42488f8faac1c057531772a862a56b7a WatchSource:0}: Error finding container 587471561cfd91586cc408760e22dcec42488f8faac1c057531772a862a56b7a: Status 404 returned error can't find the container with id 587471561cfd91586cc408760e22dcec42488f8faac1c057531772a862a56b7a Jan 22 00:33:22 crc kubenswrapper[4829]: I0122 00:33:22.195014 4829 generic.go:334] "Generic (PLEG): container finished" podID="1714cbc8-7d0f-4743-8547-caabb6fb7181" containerID="174fb28bf7afaaff286252bf626a5e496704f493312027d3435fb2da42f3b92a" exitCode=0 Jan 22 00:33:22 crc kubenswrapper[4829]: I0122 00:33:22.195072 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6nc85" event={"ID":"1714cbc8-7d0f-4743-8547-caabb6fb7181","Type":"ContainerDied","Data":"174fb28bf7afaaff286252bf626a5e496704f493312027d3435fb2da42f3b92a"} Jan 22 00:33:22 crc kubenswrapper[4829]: I0122 00:33:22.196075 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6nc85" event={"ID":"1714cbc8-7d0f-4743-8547-caabb6fb7181","Type":"ContainerStarted","Data":"587471561cfd91586cc408760e22dcec42488f8faac1c057531772a862a56b7a"} Jan 22 00:33:23 crc kubenswrapper[4829]: I0122 00:33:23.599812 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jbn5g" Jan 22 00:33:23 crc kubenswrapper[4829]: I0122 00:33:23.600077 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jbn5g" Jan 22 00:33:23 crc kubenswrapper[4829]: I0122 00:33:23.652413 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jbn5g" Jan 22 00:33:24 crc kubenswrapper[4829]: I0122 00:33:24.227004 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6nc85" 
event={"ID":"1714cbc8-7d0f-4743-8547-caabb6fb7181","Type":"ContainerStarted","Data":"53b4de8ccd11ac35a0fa879255f66de4fbc9a3a5269c912dc59e532eddf0c925"} Jan 22 00:33:24 crc kubenswrapper[4829]: I0122 00:33:24.278526 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jbn5g" Jan 22 00:33:26 crc kubenswrapper[4829]: I0122 00:33:26.456010 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jbn5g"] Jan 22 00:33:26 crc kubenswrapper[4829]: I0122 00:33:26.456679 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jbn5g" podUID="d50dda23-abf3-4274-ba20-e9005d642293" containerName="registry-server" containerID="cri-o://c236099e2a2f0f84e968596b64da726f0ac2d80de6ea1353d3f9b26854a34971" gracePeriod=2 Jan 22 00:33:27 crc kubenswrapper[4829]: I0122 00:33:27.254285 4829 generic.go:334] "Generic (PLEG): container finished" podID="1714cbc8-7d0f-4743-8547-caabb6fb7181" containerID="53b4de8ccd11ac35a0fa879255f66de4fbc9a3a5269c912dc59e532eddf0c925" exitCode=0 Jan 22 00:33:27 crc kubenswrapper[4829]: I0122 00:33:27.254353 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6nc85" event={"ID":"1714cbc8-7d0f-4743-8547-caabb6fb7181","Type":"ContainerDied","Data":"53b4de8ccd11ac35a0fa879255f66de4fbc9a3a5269c912dc59e532eddf0c925"} Jan 22 00:33:27 crc kubenswrapper[4829]: I0122 00:33:27.985041 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jbn5g" Jan 22 00:33:28 crc kubenswrapper[4829]: I0122 00:33:28.114170 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45hk2\" (UniqueName: \"kubernetes.io/projected/d50dda23-abf3-4274-ba20-e9005d642293-kube-api-access-45hk2\") pod \"d50dda23-abf3-4274-ba20-e9005d642293\" (UID: \"d50dda23-abf3-4274-ba20-e9005d642293\") " Jan 22 00:33:28 crc kubenswrapper[4829]: I0122 00:33:28.114267 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d50dda23-abf3-4274-ba20-e9005d642293-utilities\") pod \"d50dda23-abf3-4274-ba20-e9005d642293\" (UID: \"d50dda23-abf3-4274-ba20-e9005d642293\") " Jan 22 00:33:28 crc kubenswrapper[4829]: I0122 00:33:28.114314 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d50dda23-abf3-4274-ba20-e9005d642293-catalog-content\") pod \"d50dda23-abf3-4274-ba20-e9005d642293\" (UID: \"d50dda23-abf3-4274-ba20-e9005d642293\") " Jan 22 00:33:28 crc kubenswrapper[4829]: I0122 00:33:28.114991 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d50dda23-abf3-4274-ba20-e9005d642293-utilities" (OuterVolumeSpecName: "utilities") pod "d50dda23-abf3-4274-ba20-e9005d642293" (UID: "d50dda23-abf3-4274-ba20-e9005d642293"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:33:28 crc kubenswrapper[4829]: I0122 00:33:28.120077 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d50dda23-abf3-4274-ba20-e9005d642293-kube-api-access-45hk2" (OuterVolumeSpecName: "kube-api-access-45hk2") pod "d50dda23-abf3-4274-ba20-e9005d642293" (UID: "d50dda23-abf3-4274-ba20-e9005d642293"). 
InnerVolumeSpecName "kube-api-access-45hk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:33:28 crc kubenswrapper[4829]: I0122 00:33:28.162531 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d50dda23-abf3-4274-ba20-e9005d642293-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d50dda23-abf3-4274-ba20-e9005d642293" (UID: "d50dda23-abf3-4274-ba20-e9005d642293"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:33:28 crc kubenswrapper[4829]: I0122 00:33:28.215872 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45hk2\" (UniqueName: \"kubernetes.io/projected/d50dda23-abf3-4274-ba20-e9005d642293-kube-api-access-45hk2\") on node \"crc\" DevicePath \"\"" Jan 22 00:33:28 crc kubenswrapper[4829]: I0122 00:33:28.215916 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d50dda23-abf3-4274-ba20-e9005d642293-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 00:33:28 crc kubenswrapper[4829]: I0122 00:33:28.215929 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d50dda23-abf3-4274-ba20-e9005d642293-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 00:33:28 crc kubenswrapper[4829]: I0122 00:33:28.263736 4829 generic.go:334] "Generic (PLEG): container finished" podID="d50dda23-abf3-4274-ba20-e9005d642293" containerID="c236099e2a2f0f84e968596b64da726f0ac2d80de6ea1353d3f9b26854a34971" exitCode=0 Jan 22 00:33:28 crc kubenswrapper[4829]: I0122 00:33:28.263788 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jbn5g" event={"ID":"d50dda23-abf3-4274-ba20-e9005d642293","Type":"ContainerDied","Data":"c236099e2a2f0f84e968596b64da726f0ac2d80de6ea1353d3f9b26854a34971"} Jan 22 00:33:28 crc kubenswrapper[4829]: I0122 00:33:28.263823 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jbn5g" event={"ID":"d50dda23-abf3-4274-ba20-e9005d642293","Type":"ContainerDied","Data":"3183c61c424d48442002d8431c075283f123e8d7729fc24ad38bdc08ba4052d8"} Jan 22 00:33:28 crc kubenswrapper[4829]: I0122 00:33:28.263845 4829 scope.go:117] "RemoveContainer" containerID="c236099e2a2f0f84e968596b64da726f0ac2d80de6ea1353d3f9b26854a34971" Jan 22 00:33:28 crc kubenswrapper[4829]: I0122 00:33:28.263984 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jbn5g" Jan 22 00:33:28 crc kubenswrapper[4829]: I0122 00:33:28.296155 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jbn5g"] Jan 22 00:33:28 crc kubenswrapper[4829]: I0122 00:33:28.296306 4829 scope.go:117] "RemoveContainer" containerID="e5e91a39c748a2bba35c13e63f51502c5569b772bf378acbfe91afbb8497bb35" Jan 22 00:33:28 crc kubenswrapper[4829]: I0122 00:33:28.302637 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jbn5g"] Jan 22 00:33:28 crc kubenswrapper[4829]: I0122 00:33:28.311841 4829 scope.go:117] "RemoveContainer" containerID="0309b67f66932b43e99149ed2a795d8eac096d73e4ac105449b63269ddd61579" Jan 22 00:33:28 crc kubenswrapper[4829]: I0122 00:33:28.345531 4829 scope.go:117] "RemoveContainer" containerID="c236099e2a2f0f84e968596b64da726f0ac2d80de6ea1353d3f9b26854a34971" Jan 22 00:33:28 crc kubenswrapper[4829]: E0122 00:33:28.346060 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c236099e2a2f0f84e968596b64da726f0ac2d80de6ea1353d3f9b26854a34971\": container with ID starting with c236099e2a2f0f84e968596b64da726f0ac2d80de6ea1353d3f9b26854a34971 not found: ID does not exist" containerID="c236099e2a2f0f84e968596b64da726f0ac2d80de6ea1353d3f9b26854a34971" Jan 22 00:33:28 crc kubenswrapper[4829]: I0122 00:33:28.346103 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c236099e2a2f0f84e968596b64da726f0ac2d80de6ea1353d3f9b26854a34971"} err="failed to get container status \"c236099e2a2f0f84e968596b64da726f0ac2d80de6ea1353d3f9b26854a34971\": rpc error: code = NotFound desc = could not find container \"c236099e2a2f0f84e968596b64da726f0ac2d80de6ea1353d3f9b26854a34971\": container with ID starting with c236099e2a2f0f84e968596b64da726f0ac2d80de6ea1353d3f9b26854a34971 not found: ID does not exist" Jan 22 00:33:28 crc kubenswrapper[4829]: I0122 00:33:28.346131 4829 scope.go:117] "RemoveContainer" containerID="e5e91a39c748a2bba35c13e63f51502c5569b772bf378acbfe91afbb8497bb35" Jan 22 00:33:28 crc kubenswrapper[4829]: E0122 00:33:28.346609 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5e91a39c748a2bba35c13e63f51502c5569b772bf378acbfe91afbb8497bb35\": container with ID starting with e5e91a39c748a2bba35c13e63f51502c5569b772bf378acbfe91afbb8497bb35 not found: ID does not exist" containerID="e5e91a39c748a2bba35c13e63f51502c5569b772bf378acbfe91afbb8497bb35" Jan 22 00:33:28 crc kubenswrapper[4829]: I0122 00:33:28.346656 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5e91a39c748a2bba35c13e63f51502c5569b772bf378acbfe91afbb8497bb35"} err="failed to get container status \"e5e91a39c748a2bba35c13e63f51502c5569b772bf378acbfe91afbb8497bb35\": rpc error: code = NotFound desc = could not find container \"e5e91a39c748a2bba35c13e63f51502c5569b772bf378acbfe91afbb8497bb35\": container with ID starting with e5e91a39c748a2bba35c13e63f51502c5569b772bf378acbfe91afbb8497bb35 not found: ID does not exist" Jan 22 00:33:28 crc kubenswrapper[4829]: I0122 00:33:28.346681 4829 scope.go:117] "RemoveContainer" containerID="0309b67f66932b43e99149ed2a795d8eac096d73e4ac105449b63269ddd61579" Jan 22 00:33:28 crc kubenswrapper[4829]: E0122 00:33:28.346958 4829 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"0309b67f66932b43e99149ed2a795d8eac096d73e4ac105449b63269ddd61579\": container with ID starting with 0309b67f66932b43e99149ed2a795d8eac096d73e4ac105449b63269ddd61579 not found: ID does not exist" containerID="0309b67f66932b43e99149ed2a795d8eac096d73e4ac105449b63269ddd61579" Jan 22 00:33:28 crc kubenswrapper[4829]: I0122 00:33:28.346984 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0309b67f66932b43e99149ed2a795d8eac096d73e4ac105449b63269ddd61579"} err="failed to get container status \"0309b67f66932b43e99149ed2a795d8eac096d73e4ac105449b63269ddd61579\": rpc error: code = NotFound desc = could not find container \"0309b67f66932b43e99149ed2a795d8eac096d73e4ac105449b63269ddd61579\": container with ID starting with 0309b67f66932b43e99149ed2a795d8eac096d73e4ac105449b63269ddd61579 not found: ID does not exist" Jan 22 00:33:28 crc kubenswrapper[4829]: I0122 00:33:28.561811 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d50dda23-abf3-4274-ba20-e9005d642293" path="/var/lib/kubelet/pods/d50dda23-abf3-4274-ba20-e9005d642293/volumes" Jan 22 00:33:29 crc kubenswrapper[4829]: I0122 00:33:29.274431 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6nc85" event={"ID":"1714cbc8-7d0f-4743-8547-caabb6fb7181","Type":"ContainerStarted","Data":"f7951ddfaac7cfb5cca847bd351fb92299da5cd1be0eb00f43522ba24324ddab"} Jan 22 00:33:29 crc kubenswrapper[4829]: I0122 00:33:29.305594 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6nc85" podStartSLOduration=2.9441994080000002 podStartE2EDuration="9.305517473s" podCreationTimestamp="2026-01-22 00:33:20 +0000 UTC" firstStartedPulling="2026-01-22 00:33:22.196752601 +0000 UTC m=+1580.232994523" lastFinishedPulling="2026-01-22 00:33:28.558070676 +0000 UTC m=+1586.594312588" observedRunningTime="2026-01-22 00:33:29.290126477 +0000 UTC m=+1587.326368429" watchObservedRunningTime="2026-01-22 00:33:29.305517473 +0000 UTC m=+1587.341759385" Jan 22 00:33:30 crc kubenswrapper[4829]: I0122 00:33:30.996748 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6nc85" Jan 22 00:33:30 crc kubenswrapper[4829]: I0122 00:33:30.997094 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6nc85" Jan 22 00:33:32 crc kubenswrapper[4829]: I0122 00:33:32.046001 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6nc85" podUID="1714cbc8-7d0f-4743-8547-caabb6fb7181" containerName="registry-server" probeResult="failure" output=< Jan 22 00:33:32 crc kubenswrapper[4829]: timeout: failed to connect service ":50051" within 1s Jan 22 00:33:32 crc kubenswrapper[4829]: > Jan 22 00:33:32 crc kubenswrapper[4829]: I0122 00:33:32.580217 4829 scope.go:117] "RemoveContainer" containerID="eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" Jan 22 00:33:32 crc kubenswrapper[4829]: E0122 00:33:32.580738 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:33:41 crc kubenswrapper[4829]: I0122 00:33:41.055514 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6nc85" Jan 22 00:33:41 crc kubenswrapper[4829]: I0122 00:33:41.111182 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6nc85" Jan 22 00:33:41 crc kubenswrapper[4829]: I0122 00:33:41.294589 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6nc85"] Jan 22 00:33:41 crc kubenswrapper[4829]: I0122 00:33:41.369872 4829 generic.go:334] "Generic (PLEG): container finished" podID="dc82dca4-55c8-4dba-8b80-8c8934cb3bff" containerID="1421dbcd4f3af85a8ca0d3ef59350c43cd03fc2aa22e9e778c130070f696189f" exitCode=0 Jan 22 00:33:41 crc kubenswrapper[4829]: I0122 00:33:41.369942 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-nkvjg" event={"ID":"dc82dca4-55c8-4dba-8b80-8c8934cb3bff","Type":"ContainerDied","Data":"1421dbcd4f3af85a8ca0d3ef59350c43cd03fc2aa22e9e778c130070f696189f"} Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.385840 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6nc85" podUID="1714cbc8-7d0f-4743-8547-caabb6fb7181" containerName="registry-server" containerID="cri-o://f7951ddfaac7cfb5cca847bd351fb92299da5cd1be0eb00f43522ba24324ddab" gracePeriod=2 Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.694619 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-nkvjg" Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.792882 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6nc85" Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.838086 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-healthcheck-log\") pod \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.838158 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-collectd-config\") pod \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.838226 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-ceilometer-publisher\") pod \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.838292 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dgj9\" (UniqueName: \"kubernetes.io/projected/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-kube-api-access-6dgj9\") pod \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.838338 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-sensubility-config\") pod \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.838364 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-ceilometer-entrypoint-script\") pod \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.838397 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-collectd-entrypoint-script\") pod \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\" (UID: \"dc82dca4-55c8-4dba-8b80-8c8934cb3bff\") " Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.849668 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-kube-api-access-6dgj9" (OuterVolumeSpecName: "kube-api-access-6dgj9") pod "dc82dca4-55c8-4dba-8b80-8c8934cb3bff" (UID: "dc82dca4-55c8-4dba-8b80-8c8934cb3bff"). InnerVolumeSpecName "kube-api-access-6dgj9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.858073 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-healthcheck-log" (OuterVolumeSpecName: "healthcheck-log") pod "dc82dca4-55c8-4dba-8b80-8c8934cb3bff" (UID: "dc82dca4-55c8-4dba-8b80-8c8934cb3bff"). InnerVolumeSpecName "healthcheck-log". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.858518 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-sensubility-config" (OuterVolumeSpecName: "sensubility-config") pod "dc82dca4-55c8-4dba-8b80-8c8934cb3bff" (UID: "dc82dca4-55c8-4dba-8b80-8c8934cb3bff"). InnerVolumeSpecName "sensubility-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.858641 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-ceilometer-publisher" (OuterVolumeSpecName: "ceilometer-publisher") pod "dc82dca4-55c8-4dba-8b80-8c8934cb3bff" (UID: "dc82dca4-55c8-4dba-8b80-8c8934cb3bff"). InnerVolumeSpecName "ceilometer-publisher". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.860298 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-collectd-config" (OuterVolumeSpecName: "collectd-config") pod "dc82dca4-55c8-4dba-8b80-8c8934cb3bff" (UID: "dc82dca4-55c8-4dba-8b80-8c8934cb3bff"). InnerVolumeSpecName "collectd-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.861865 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-collectd-entrypoint-script" (OuterVolumeSpecName: "collectd-entrypoint-script") pod "dc82dca4-55c8-4dba-8b80-8c8934cb3bff" (UID: "dc82dca4-55c8-4dba-8b80-8c8934cb3bff"). InnerVolumeSpecName "collectd-entrypoint-script". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.862475 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-ceilometer-entrypoint-script" (OuterVolumeSpecName: "ceilometer-entrypoint-script") pod "dc82dca4-55c8-4dba-8b80-8c8934cb3bff" (UID: "dc82dca4-55c8-4dba-8b80-8c8934cb3bff"). InnerVolumeSpecName "ceilometer-entrypoint-script". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.939284 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1714cbc8-7d0f-4743-8547-caabb6fb7181-utilities\") pod \"1714cbc8-7d0f-4743-8547-caabb6fb7181\" (UID: \"1714cbc8-7d0f-4743-8547-caabb6fb7181\") " Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.939356 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g54d7\" (UniqueName: \"kubernetes.io/projected/1714cbc8-7d0f-4743-8547-caabb6fb7181-kube-api-access-g54d7\") pod \"1714cbc8-7d0f-4743-8547-caabb6fb7181\" (UID: \"1714cbc8-7d0f-4743-8547-caabb6fb7181\") " Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.939830 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1714cbc8-7d0f-4743-8547-caabb6fb7181-catalog-content\") pod \"1714cbc8-7d0f-4743-8547-caabb6fb7181\" (UID: \"1714cbc8-7d0f-4743-8547-caabb6fb7181\") " Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.940197 4829 reconciler_common.go:293] "Volume detached for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-healthcheck-log\") on node \"crc\" DevicePath \"\"" Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.940215 4829 reconciler_common.go:293] "Volume detached for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-collectd-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.940224 4829 reconciler_common.go:293] "Volume detached for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-ceilometer-publisher\") on node \"crc\" DevicePath \"\"" Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.940211 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1714cbc8-7d0f-4743-8547-caabb6fb7181-utilities" (OuterVolumeSpecName: "utilities") pod "1714cbc8-7d0f-4743-8547-caabb6fb7181" (UID: "1714cbc8-7d0f-4743-8547-caabb6fb7181"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.940233 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dgj9\" (UniqueName: \"kubernetes.io/projected/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-kube-api-access-6dgj9\") on node \"crc\" DevicePath \"\"" Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.940302 4829 reconciler_common.go:293] "Volume detached for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-sensubility-config\") on node \"crc\" DevicePath \"\"" Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.940319 4829 reconciler_common.go:293] "Volume detached for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-ceilometer-entrypoint-script\") on node \"crc\" DevicePath \"\"" Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.940339 4829 reconciler_common.go:293] "Volume detached for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/dc82dca4-55c8-4dba-8b80-8c8934cb3bff-collectd-entrypoint-script\") on node \"crc\" DevicePath \"\"" Jan 22 00:33:42 crc kubenswrapper[4829]: I0122 00:33:42.941762 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1714cbc8-7d0f-4743-8547-caabb6fb7181-kube-api-access-g54d7" (OuterVolumeSpecName: "kube-api-access-g54d7") pod "1714cbc8-7d0f-4743-8547-caabb6fb7181" (UID: "1714cbc8-7d0f-4743-8547-caabb6fb7181"). InnerVolumeSpecName "kube-api-access-g54d7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:33:43 crc kubenswrapper[4829]: I0122 00:33:43.041879 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1714cbc8-7d0f-4743-8547-caabb6fb7181-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 00:33:43 crc kubenswrapper[4829]: I0122 00:33:43.041915 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g54d7\" (UniqueName: \"kubernetes.io/projected/1714cbc8-7d0f-4743-8547-caabb6fb7181-kube-api-access-g54d7\") on node \"crc\" DevicePath \"\"" Jan 22 00:33:43 crc kubenswrapper[4829]: I0122 00:33:43.101947 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1714cbc8-7d0f-4743-8547-caabb6fb7181-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1714cbc8-7d0f-4743-8547-caabb6fb7181" (UID: "1714cbc8-7d0f-4743-8547-caabb6fb7181"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:33:43 crc kubenswrapper[4829]: I0122 00:33:43.142934 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1714cbc8-7d0f-4743-8547-caabb6fb7181-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 00:33:43 crc kubenswrapper[4829]: I0122 00:33:43.396076 4829 generic.go:334] "Generic (PLEG): container finished" podID="1714cbc8-7d0f-4743-8547-caabb6fb7181" containerID="f7951ddfaac7cfb5cca847bd351fb92299da5cd1be0eb00f43522ba24324ddab" exitCode=0 Jan 22 00:33:43 crc kubenswrapper[4829]: I0122 00:33:43.396140 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6nc85" Jan 22 00:33:43 crc kubenswrapper[4829]: I0122 00:33:43.396151 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6nc85" event={"ID":"1714cbc8-7d0f-4743-8547-caabb6fb7181","Type":"ContainerDied","Data":"f7951ddfaac7cfb5cca847bd351fb92299da5cd1be0eb00f43522ba24324ddab"} Jan 22 00:33:43 crc kubenswrapper[4829]: I0122 00:33:43.396182 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6nc85" event={"ID":"1714cbc8-7d0f-4743-8547-caabb6fb7181","Type":"ContainerDied","Data":"587471561cfd91586cc408760e22dcec42488f8faac1c057531772a862a56b7a"} Jan 22 00:33:43 crc kubenswrapper[4829]: I0122 00:33:43.396201 4829 scope.go:117] "RemoveContainer" containerID="f7951ddfaac7cfb5cca847bd351fb92299da5cd1be0eb00f43522ba24324ddab" Jan 22 00:33:43 crc kubenswrapper[4829]: I0122 00:33:43.400253 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-nkvjg" event={"ID":"dc82dca4-55c8-4dba-8b80-8c8934cb3bff","Type":"ContainerDied","Data":"f67820938035ae56be4d093df67c6a626fe0e2639a87b876593045ca00673175"} Jan 22 00:33:43 crc kubenswrapper[4829]: I0122 00:33:43.400315 4829 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f67820938035ae56be4d093df67c6a626fe0e2639a87b876593045ca00673175" Jan 22 00:33:43 crc kubenswrapper[4829]: I0122 00:33:43.400328 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-nkvjg" Jan 22 00:33:43 crc kubenswrapper[4829]: I0122 00:33:43.425096 4829 scope.go:117] "RemoveContainer" containerID="53b4de8ccd11ac35a0fa879255f66de4fbc9a3a5269c912dc59e532eddf0c925" Jan 22 00:33:43 crc kubenswrapper[4829]: I0122 00:33:43.434326 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6nc85"] Jan 22 00:33:43 crc kubenswrapper[4829]: I0122 00:33:43.439693 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6nc85"] Jan 22 00:33:43 crc kubenswrapper[4829]: I0122 00:33:43.462762 4829 scope.go:117] "RemoveContainer" containerID="174fb28bf7afaaff286252bf626a5e496704f493312027d3435fb2da42f3b92a" Jan 22 00:33:43 crc kubenswrapper[4829]: I0122 00:33:43.479075 4829 scope.go:117] "RemoveContainer" containerID="f7951ddfaac7cfb5cca847bd351fb92299da5cd1be0eb00f43522ba24324ddab" Jan 22 00:33:43 crc kubenswrapper[4829]: E0122 00:33:43.479499 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7951ddfaac7cfb5cca847bd351fb92299da5cd1be0eb00f43522ba24324ddab\": container with ID starting with f7951ddfaac7cfb5cca847bd351fb92299da5cd1be0eb00f43522ba24324ddab not found: ID does not exist" containerID="f7951ddfaac7cfb5cca847bd351fb92299da5cd1be0eb00f43522ba24324ddab" Jan 22 00:33:43 crc kubenswrapper[4829]: I0122 00:33:43.479562 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7951ddfaac7cfb5cca847bd351fb92299da5cd1be0eb00f43522ba24324ddab"} err="failed to get container status \"f7951ddfaac7cfb5cca847bd351fb92299da5cd1be0eb00f43522ba24324ddab\": rpc error: code = NotFound desc = could not find container \"f7951ddfaac7cfb5cca847bd351fb92299da5cd1be0eb00f43522ba24324ddab\": container with ID starting with f7951ddfaac7cfb5cca847bd351fb92299da5cd1be0eb00f43522ba24324ddab not found: ID does not 
exist" Jan 22 00:33:43 crc kubenswrapper[4829]: I0122 00:33:43.479592 4829 scope.go:117] "RemoveContainer" containerID="53b4de8ccd11ac35a0fa879255f66de4fbc9a3a5269c912dc59e532eddf0c925" Jan 22 00:33:43 crc kubenswrapper[4829]: E0122 00:33:43.480675 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53b4de8ccd11ac35a0fa879255f66de4fbc9a3a5269c912dc59e532eddf0c925\": container with ID starting with 53b4de8ccd11ac35a0fa879255f66de4fbc9a3a5269c912dc59e532eddf0c925 not found: ID does not exist" containerID="53b4de8ccd11ac35a0fa879255f66de4fbc9a3a5269c912dc59e532eddf0c925" Jan 22 00:33:43 crc kubenswrapper[4829]: I0122 00:33:43.480707 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53b4de8ccd11ac35a0fa879255f66de4fbc9a3a5269c912dc59e532eddf0c925"} err="failed to get container status \"53b4de8ccd11ac35a0fa879255f66de4fbc9a3a5269c912dc59e532eddf0c925\": rpc error: code = NotFound desc = could not find container \"53b4de8ccd11ac35a0fa879255f66de4fbc9a3a5269c912dc59e532eddf0c925\": container with ID starting with 53b4de8ccd11ac35a0fa879255f66de4fbc9a3a5269c912dc59e532eddf0c925 not found: ID does not exist" Jan 22 00:33:43 crc kubenswrapper[4829]: I0122 00:33:43.480732 4829 scope.go:117] "RemoveContainer" containerID="174fb28bf7afaaff286252bf626a5e496704f493312027d3435fb2da42f3b92a" Jan 22 00:33:43 crc kubenswrapper[4829]: E0122 00:33:43.481051 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"174fb28bf7afaaff286252bf626a5e496704f493312027d3435fb2da42f3b92a\": container with ID starting with 174fb28bf7afaaff286252bf626a5e496704f493312027d3435fb2da42f3b92a not found: ID does not exist" containerID="174fb28bf7afaaff286252bf626a5e496704f493312027d3435fb2da42f3b92a" Jan 22 00:33:43 crc kubenswrapper[4829]: I0122 00:33:43.481107 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"174fb28bf7afaaff286252bf626a5e496704f493312027d3435fb2da42f3b92a"} err="failed to get container status \"174fb28bf7afaaff286252bf626a5e496704f493312027d3435fb2da42f3b92a\": rpc error: code = NotFound desc = could not find container \"174fb28bf7afaaff286252bf626a5e496704f493312027d3435fb2da42f3b92a\": container with ID starting with 174fb28bf7afaaff286252bf626a5e496704f493312027d3435fb2da42f3b92a not found: ID does not exist" Jan 22 00:33:44 crc kubenswrapper[4829]: I0122 00:33:44.554287 4829 scope.go:117] "RemoveContainer" containerID="eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" Jan 22 00:33:44 crc kubenswrapper[4829]: E0122 00:33:44.554855 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:33:44 crc kubenswrapper[4829]: I0122 00:33:44.561036 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1714cbc8-7d0f-4743-8547-caabb6fb7181" path="/var/lib/kubelet/pods/1714cbc8-7d0f-4743-8547-caabb6fb7181/volumes" Jan 22 00:33:44 crc kubenswrapper[4829]: I0122 00:33:44.614720 4829 log.go:25] "Finished parsing log file" 
path="/var/log/pods/service-telemetry_stf-smoketest-smoke1-nkvjg_dc82dca4-55c8-4dba-8b80-8c8934cb3bff/smoketest-collectd/0.log" Jan 22 00:33:44 crc kubenswrapper[4829]: I0122 00:33:44.875288 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_stf-smoketest-smoke1-nkvjg_dc82dca4-55c8-4dba-8b80-8c8934cb3bff/smoketest-ceilometer/0.log" Jan 22 00:33:45 crc kubenswrapper[4829]: I0122 00:33:45.108225 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-interconnect-68864d46cb-ccnh9_3ac09cb4-04ec-4360-90c2-5e3d7cb81b87/default-interconnect/0.log" Jan 22 00:33:45 crc kubenswrapper[4829]: I0122 00:33:45.344753 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6_ed4e3e4a-9af6-470a-9f31-9bb2f31e166d/bridge/2.log" Jan 22 00:33:45 crc kubenswrapper[4829]: I0122 00:33:45.593950 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6_ed4e3e4a-9af6-470a-9f31-9bb2f31e166d/sg-core/0.log" Jan 22 00:33:45 crc kubenswrapper[4829]: I0122 00:33:45.826533 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-event-smartgateway-86889d79-n55rd_975d0f01-de8e-402c-aa4b-f582673781e9/bridge/2.log" Jan 22 00:33:46 crc kubenswrapper[4829]: I0122 00:33:46.056675 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-event-smartgateway-86889d79-n55rd_975d0f01-de8e-402c-aa4b-f582673781e9/sg-core/0.log" Jan 22 00:33:46 crc kubenswrapper[4829]: I0122 00:33:46.282955 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45_6bff65e7-d317-42b0-92fd-79aa3d4a6d7a/bridge/2.log" Jan 22 00:33:46 crc kubenswrapper[4829]: I0122 00:33:46.532992 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45_6bff65e7-d317-42b0-92fd-79aa3d4a6d7a/sg-core/0.log" Jan 22 00:33:46 crc kubenswrapper[4829]: I0122 00:33:46.801845 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-event-smartgateway-f964b986c-5ln55_8adb7c3d-24d0-462c-a2bb-ff7533df28d6/bridge/2.log" Jan 22 00:33:47 crc kubenswrapper[4829]: I0122 00:33:47.050151 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-event-smartgateway-f964b986c-5ln55_8adb7c3d-24d0-462c-a2bb-ff7533df28d6/sg-core/0.log" Jan 22 00:33:47 crc kubenswrapper[4829]: I0122 00:33:47.325265 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf_e0451c51-c962-4cad-88e1-4e58c936f3c4/bridge/2.log" Jan 22 00:33:47 crc kubenswrapper[4829]: I0122 00:33:47.618768 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf_e0451c51-c962-4cad-88e1-4e58c936f3c4/sg-core/0.log" Jan 22 00:33:51 crc kubenswrapper[4829]: I0122 00:33:51.002004 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-74c8dcd6b5-k6w8z_ea1e9cc4-4807-47c7-926f-2e9ff44b6f22/operator/0.log" Jan 22 00:33:51 crc kubenswrapper[4829]: I0122 00:33:51.299927 4829 log.go:25] "Finished parsing log file" 
path="/var/log/pods/service-telemetry_prometheus-default-0_9f9e74a3-14d1-45b0-9c9f-1fa93676002a/prometheus/0.log" Jan 22 00:33:51 crc kubenswrapper[4829]: I0122 00:33:51.564398 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_elasticsearch-es-default-0_39d65618-bed4-44b9-8893-94d1381c5421/elasticsearch/0.log" Jan 22 00:33:51 crc kubenswrapper[4829]: I0122 00:33:51.818021 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-6856cfb745-qxqkv_956fb702-682a-4908-9ce8-4572084b0d4e/prometheus-webhook-snmp/0.log" Jan 22 00:33:52 crc kubenswrapper[4829]: I0122 00:33:52.164143 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_alertmanager-default-0_cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6/alertmanager/0.log" Jan 22 00:33:57 crc kubenswrapper[4829]: I0122 00:33:57.553171 4829 scope.go:117] "RemoveContainer" containerID="eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" Jan 22 00:33:57 crc kubenswrapper[4829]: E0122 00:33:57.553906 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:34:03 crc kubenswrapper[4829]: I0122 00:34:03.258030 4829 scope.go:117] "RemoveContainer" containerID="144da8b574be7312e6ab254834bd89eef06d2491ed183ac3ef46f95145e0091d" Jan 22 00:34:03 crc kubenswrapper[4829]: I0122 00:34:03.285367 4829 scope.go:117] "RemoveContainer" containerID="f60432f5380ef13842f617d5cdba7cc65172f5204d94303264edaf5f401d2021" Jan 22 00:34:06 crc kubenswrapper[4829]: I0122 00:34:06.719108 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-79bb6b48f5-hsqj9_e34ba979-1b71-49d7-9dab-0aac0e390ff1/operator/0.log" Jan 22 00:34:09 crc kubenswrapper[4829]: I0122 00:34:09.553205 4829 scope.go:117] "RemoveContainer" containerID="eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" Jan 22 00:34:09 crc kubenswrapper[4829]: E0122 00:34:09.553772 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:34:09 crc kubenswrapper[4829]: I0122 00:34:09.927608 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-74c8dcd6b5-k6w8z_ea1e9cc4-4807-47c7-926f-2e9ff44b6f22/operator/0.log" Jan 22 00:34:10 crc kubenswrapper[4829]: I0122 00:34:10.198126 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_qdr-test_a7e55abd-fc7a-4fbb-a99d-695e2287e933/qdr/0.log" Jan 22 00:34:23 crc kubenswrapper[4829]: I0122 00:34:23.554591 4829 scope.go:117] "RemoveContainer" containerID="eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" Jan 22 00:34:23 crc kubenswrapper[4829]: E0122 00:34:23.555632 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:34:34 crc kubenswrapper[4829]: I0122 00:34:34.553483 4829 scope.go:117] "RemoveContainer" containerID="eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" Jan 22 00:34:34 crc kubenswrapper[4829]: E0122 00:34:34.554592 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:34:46 crc kubenswrapper[4829]: I0122 00:34:46.014898 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-p6czd/must-gather-64s6n"] Jan 22 00:34:46 crc kubenswrapper[4829]: E0122 00:34:46.015394 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc82dca4-55c8-4dba-8b80-8c8934cb3bff" containerName="smoketest-ceilometer" Jan 22 00:34:46 crc kubenswrapper[4829]: I0122 00:34:46.015407 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc82dca4-55c8-4dba-8b80-8c8934cb3bff" containerName="smoketest-ceilometer" Jan 22 00:34:46 crc kubenswrapper[4829]: E0122 00:34:46.015421 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc82dca4-55c8-4dba-8b80-8c8934cb3bff" containerName="smoketest-collectd" Jan 22 00:34:46 crc kubenswrapper[4829]: I0122 00:34:46.015427 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc82dca4-55c8-4dba-8b80-8c8934cb3bff" containerName="smoketest-collectd" Jan 22 00:34:46 crc kubenswrapper[4829]: E0122 00:34:46.015436 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1714cbc8-7d0f-4743-8547-caabb6fb7181" containerName="extract-content" Jan 22 00:34:46 crc kubenswrapper[4829]: I0122 00:34:46.015442 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="1714cbc8-7d0f-4743-8547-caabb6fb7181" containerName="extract-content" Jan 22 00:34:46 crc kubenswrapper[4829]: E0122 00:34:46.015449 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1714cbc8-7d0f-4743-8547-caabb6fb7181" containerName="registry-server" Jan 22 00:34:46 crc kubenswrapper[4829]: I0122 00:34:46.015455 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="1714cbc8-7d0f-4743-8547-caabb6fb7181" containerName="registry-server" Jan 22 00:34:46 crc kubenswrapper[4829]: E0122 00:34:46.015465 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d50dda23-abf3-4274-ba20-e9005d642293" containerName="extract-utilities" Jan 22 00:34:46 crc kubenswrapper[4829]: I0122 00:34:46.015471 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="d50dda23-abf3-4274-ba20-e9005d642293" containerName="extract-utilities" Jan 22 00:34:46 crc kubenswrapper[4829]: E0122 00:34:46.015482 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d50dda23-abf3-4274-ba20-e9005d642293" containerName="registry-server" Jan 22 00:34:46 crc kubenswrapper[4829]: I0122 00:34:46.015489 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="d50dda23-abf3-4274-ba20-e9005d642293" 
containerName="registry-server" Jan 22 00:34:46 crc kubenswrapper[4829]: E0122 00:34:46.015498 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1714cbc8-7d0f-4743-8547-caabb6fb7181" containerName="extract-utilities" Jan 22 00:34:46 crc kubenswrapper[4829]: I0122 00:34:46.015504 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="1714cbc8-7d0f-4743-8547-caabb6fb7181" containerName="extract-utilities" Jan 22 00:34:46 crc kubenswrapper[4829]: E0122 00:34:46.015517 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d50dda23-abf3-4274-ba20-e9005d642293" containerName="extract-content" Jan 22 00:34:46 crc kubenswrapper[4829]: I0122 00:34:46.015523 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="d50dda23-abf3-4274-ba20-e9005d642293" containerName="extract-content" Jan 22 00:34:46 crc kubenswrapper[4829]: I0122 00:34:46.015641 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc82dca4-55c8-4dba-8b80-8c8934cb3bff" containerName="smoketest-ceilometer" Jan 22 00:34:46 crc kubenswrapper[4829]: I0122 00:34:46.015656 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="1714cbc8-7d0f-4743-8547-caabb6fb7181" containerName="registry-server" Jan 22 00:34:46 crc kubenswrapper[4829]: I0122 00:34:46.015665 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc82dca4-55c8-4dba-8b80-8c8934cb3bff" containerName="smoketest-collectd" Jan 22 00:34:46 crc kubenswrapper[4829]: I0122 00:34:46.015674 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="d50dda23-abf3-4274-ba20-e9005d642293" containerName="registry-server" Jan 22 00:34:46 crc kubenswrapper[4829]: I0122 00:34:46.016325 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-p6czd/must-gather-64s6n" Jan 22 00:34:46 crc kubenswrapper[4829]: I0122 00:34:46.019868 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-p6czd"/"kube-root-ca.crt" Jan 22 00:34:46 crc kubenswrapper[4829]: I0122 00:34:46.020016 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-p6czd"/"default-dockercfg-dlc2h" Jan 22 00:34:46 crc kubenswrapper[4829]: I0122 00:34:46.020181 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-p6czd"/"openshift-service-ca.crt" Jan 22 00:34:46 crc kubenswrapper[4829]: I0122 00:34:46.036430 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-p6czd/must-gather-64s6n"] Jan 22 00:34:46 crc kubenswrapper[4829]: I0122 00:34:46.057870 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/bddc30c9-a49d-44ac-80d7-d5aef02b5ca4-must-gather-output\") pod \"must-gather-64s6n\" (UID: \"bddc30c9-a49d-44ac-80d7-d5aef02b5ca4\") " pod="openshift-must-gather-p6czd/must-gather-64s6n" Jan 22 00:34:46 crc kubenswrapper[4829]: I0122 00:34:46.057989 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skqd6\" (UniqueName: \"kubernetes.io/projected/bddc30c9-a49d-44ac-80d7-d5aef02b5ca4-kube-api-access-skqd6\") pod \"must-gather-64s6n\" (UID: \"bddc30c9-a49d-44ac-80d7-d5aef02b5ca4\") " pod="openshift-must-gather-p6czd/must-gather-64s6n" Jan 22 00:34:46 crc kubenswrapper[4829]: I0122 00:34:46.158916 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/bddc30c9-a49d-44ac-80d7-d5aef02b5ca4-must-gather-output\") pod \"must-gather-64s6n\" (UID: \"bddc30c9-a49d-44ac-80d7-d5aef02b5ca4\") " pod="openshift-must-gather-p6czd/must-gather-64s6n" Jan 22 00:34:46 crc kubenswrapper[4829]: I0122 00:34:46.159428 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/bddc30c9-a49d-44ac-80d7-d5aef02b5ca4-must-gather-output\") pod \"must-gather-64s6n\" (UID: \"bddc30c9-a49d-44ac-80d7-d5aef02b5ca4\") " pod="openshift-must-gather-p6czd/must-gather-64s6n" Jan 22 00:34:46 crc kubenswrapper[4829]: I0122 00:34:46.159460 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skqd6\" (UniqueName: \"kubernetes.io/projected/bddc30c9-a49d-44ac-80d7-d5aef02b5ca4-kube-api-access-skqd6\") pod \"must-gather-64s6n\" (UID: \"bddc30c9-a49d-44ac-80d7-d5aef02b5ca4\") " pod="openshift-must-gather-p6czd/must-gather-64s6n" Jan 22 00:34:46 crc kubenswrapper[4829]: I0122 00:34:46.178800 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skqd6\" (UniqueName: \"kubernetes.io/projected/bddc30c9-a49d-44ac-80d7-d5aef02b5ca4-kube-api-access-skqd6\") pod \"must-gather-64s6n\" (UID: \"bddc30c9-a49d-44ac-80d7-d5aef02b5ca4\") " pod="openshift-must-gather-p6czd/must-gather-64s6n" Jan 22 00:34:46 crc kubenswrapper[4829]: I0122 00:34:46.337687 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-p6czd/must-gather-64s6n" Jan 22 00:34:46 crc kubenswrapper[4829]: I0122 00:34:46.770960 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-p6czd/must-gather-64s6n"] Jan 22 00:34:46 crc kubenswrapper[4829]: I0122 00:34:46.977135 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-p6czd/must-gather-64s6n" event={"ID":"bddc30c9-a49d-44ac-80d7-d5aef02b5ca4","Type":"ContainerStarted","Data":"b405995d129cec042370f7da0562ace37cb47c0b6cac6beb32dcaeb8d1d78a1f"} Jan 22 00:34:48 crc kubenswrapper[4829]: I0122 00:34:48.554049 4829 scope.go:117] "RemoveContainer" containerID="eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" Jan 22 00:34:48 crc kubenswrapper[4829]: E0122 00:34:48.554465 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:34:51 crc kubenswrapper[4829]: I0122 00:34:51.006710 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-p6czd/must-gather-64s6n" event={"ID":"bddc30c9-a49d-44ac-80d7-d5aef02b5ca4","Type":"ContainerStarted","Data":"f6543dfd1f3c0a19cc91d2d63045033dcb919a9a29c2948083c4ef17760a18b1"} Jan 22 00:34:52 crc kubenswrapper[4829]: I0122 00:34:52.014715 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-p6czd/must-gather-64s6n" event={"ID":"bddc30c9-a49d-44ac-80d7-d5aef02b5ca4","Type":"ContainerStarted","Data":"5922de019f19b07f1b43526505b4c554b30427da43fcbe90d40c5c79579bfe81"} Jan 22 00:34:52 crc kubenswrapper[4829]: I0122 00:34:52.031501 4829 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openshift-must-gather-p6czd/must-gather-64s6n" podStartSLOduration=3.148888659 podStartE2EDuration="7.031481665s" podCreationTimestamp="2026-01-22 00:34:45 +0000 UTC" firstStartedPulling="2026-01-22 00:34:46.779461459 +0000 UTC m=+1664.815703371" lastFinishedPulling="2026-01-22 00:34:50.662054465 +0000 UTC m=+1668.698296377" observedRunningTime="2026-01-22 00:34:52.030732242 +0000 UTC m=+1670.066974154" watchObservedRunningTime="2026-01-22 00:34:52.031481665 +0000 UTC m=+1670.067723577" Jan 22 00:34:59 crc kubenswrapper[4829]: I0122 00:34:59.553853 4829 scope.go:117] "RemoveContainer" containerID="eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" Jan 22 00:34:59 crc kubenswrapper[4829]: E0122 00:34:59.554631 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:35:03 crc kubenswrapper[4829]: I0122 00:35:03.185888 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-cvqvz_db0366e2-9560-4f1a-949c-66d4ddc09b89/control-plane-machine-set-operator/0.log" Jan 22 00:35:03 crc kubenswrapper[4829]: I0122 00:35:03.204320 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-nxg4z_3916e963-6bce-4316-b02e-98b5565e8615/kube-rbac-proxy/0.log" Jan 22 00:35:03 crc kubenswrapper[4829]: I0122 00:35:03.212488 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-nxg4z_3916e963-6bce-4316-b02e-98b5565e8615/machine-api-operator/0.log" Jan 22 00:35:08 crc kubenswrapper[4829]: I0122 00:35:08.473484 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-qx4r2_12fe7f95-6902-42ad-82b7-e7162826af10/cert-manager-controller/0.log" Jan 22 00:35:08 crc kubenswrapper[4829]: I0122 00:35:08.485633 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-6rdp8_c88bc26a-1dc4-459d-8e47-86eafb891058/cert-manager-cainjector/0.log" Jan 22 00:35:08 crc kubenswrapper[4829]: I0122 00:35:08.497717 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-vwmph_a13fcd99-a750-4831-a905-548b24cccf48/cert-manager-webhook/0.log" Jan 22 00:35:10 crc kubenswrapper[4829]: I0122 00:35:10.554274 4829 scope.go:117] "RemoveContainer" containerID="eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" Jan 22 00:35:10 crc kubenswrapper[4829]: E0122 00:35:10.554836 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:35:13 crc kubenswrapper[4829]: I0122 00:35:13.727739 4829 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-dq7x7_a6e6ea79-b55a-4652-bc1f-4788eb17b6d4/prometheus-operator/0.log" Jan 22 00:35:13 crc kubenswrapper[4829]: I0122 00:35:13.749091 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-666d948cd9-6xt2f_893c6bae-0ad7-4fb7-8f5a-5c5a13d41a3d/prometheus-operator-admission-webhook/0.log" Jan 22 00:35:13 crc kubenswrapper[4829]: I0122 00:35:13.769711 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-666d948cd9-nmvft_3cc8f122-dbed-494d-873d-dde35bf15c60/prometheus-operator-admission-webhook/0.log" Jan 22 00:35:13 crc kubenswrapper[4829]: I0122 00:35:13.788284 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-klpvs_bdb33684-1dcb-41b0-904e-7bcb3aa2d1de/operator/0.log" Jan 22 00:35:13 crc kubenswrapper[4829]: I0122 00:35:13.809028 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-992z9_f8606d02-0830-4b52-80b0-5e30e6003f08/perses-operator/0.log" Jan 22 00:35:19 crc kubenswrapper[4829]: I0122 00:35:19.030404 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h_0225cd53-73fa-4345-85b3-7de7de23f707/extract/0.log" Jan 22 00:35:19 crc kubenswrapper[4829]: I0122 00:35:19.039077 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h_0225cd53-73fa-4345-85b3-7de7de23f707/util/0.log" Jan 22 00:35:19 crc kubenswrapper[4829]: I0122 00:35:19.070246 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931anqv4h_0225cd53-73fa-4345-85b3-7de7de23f707/pull/0.log" Jan 22 00:35:19 crc kubenswrapper[4829]: I0122 00:35:19.083380 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq_3b106cde-e55f-49c8-bf6a-c449b5c11b79/extract/0.log" Jan 22 00:35:19 crc kubenswrapper[4829]: I0122 00:35:19.091799 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq_3b106cde-e55f-49c8-bf6a-c449b5c11b79/util/0.log" Jan 22 00:35:19 crc kubenswrapper[4829]: I0122 00:35:19.098644 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8f6nmcq_3b106cde-e55f-49c8-bf6a-c449b5c11b79/pull/0.log" Jan 22 00:35:19 crc kubenswrapper[4829]: I0122 00:35:19.108908 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h_0bf87245-ac20-41c2-ad0e-4b08d5aea64e/extract/0.log" Jan 22 00:35:19 crc kubenswrapper[4829]: I0122 00:35:19.118688 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h_0bf87245-ac20-41c2-ad0e-4b08d5aea64e/util/0.log" Jan 22 00:35:19 crc kubenswrapper[4829]: I0122 00:35:19.125182 4829 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e5t66h_0bf87245-ac20-41c2-ad0e-4b08d5aea64e/pull/0.log" Jan 22 00:35:19 crc kubenswrapper[4829]: I0122 00:35:19.134394 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt_8a8c9ff7-662e-475e-8571-12d6572c3cd1/extract/0.log" Jan 22 00:35:19 crc kubenswrapper[4829]: I0122 00:35:19.139947 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt_8a8c9ff7-662e-475e-8571-12d6572c3cd1/util/0.log" Jan 22 00:35:19 crc kubenswrapper[4829]: I0122 00:35:19.148042 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08bp8xt_8a8c9ff7-662e-475e-8571-12d6572c3cd1/pull/0.log" Jan 22 00:35:19 crc kubenswrapper[4829]: I0122 00:35:19.517811 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-g7zhb_e0b65e9f-05eb-47f2-ba46-cb2de6aa0888/registry-server/0.log" Jan 22 00:35:19 crc kubenswrapper[4829]: I0122 00:35:19.523462 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-g7zhb_e0b65e9f-05eb-47f2-ba46-cb2de6aa0888/extract-utilities/0.log" Jan 22 00:35:19 crc kubenswrapper[4829]: I0122 00:35:19.541831 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-g7zhb_e0b65e9f-05eb-47f2-ba46-cb2de6aa0888/extract-content/0.log" Jan 22 00:35:19 crc kubenswrapper[4829]: I0122 00:35:19.848592 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-k4khp_562c006a-af63-46fc-91b8-fcab2322a59d/registry-server/0.log" Jan 22 00:35:19 crc kubenswrapper[4829]: I0122 00:35:19.852747 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-k4khp_562c006a-af63-46fc-91b8-fcab2322a59d/extract-utilities/0.log" Jan 22 00:35:19 crc kubenswrapper[4829]: I0122 00:35:19.858824 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-k4khp_562c006a-af63-46fc-91b8-fcab2322a59d/extract-content/0.log" Jan 22 00:35:19 crc kubenswrapper[4829]: I0122 00:35:19.877478 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-k5z2d_61cad9e2-128b-4059-b8cb-7b024ecb5ce3/marketplace-operator/0.log" Jan 22 00:35:20 crc kubenswrapper[4829]: I0122 00:35:20.198116 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-twbvl_d464f7a3-9a5c-4666-90a5-26ab2bf827b1/registry-server/0.log" Jan 22 00:35:20 crc kubenswrapper[4829]: I0122 00:35:20.203262 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-twbvl_d464f7a3-9a5c-4666-90a5-26ab2bf827b1/extract-utilities/0.log" Jan 22 00:35:20 crc kubenswrapper[4829]: I0122 00:35:20.210124 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-twbvl_d464f7a3-9a5c-4666-90a5-26ab2bf827b1/extract-content/0.log" Jan 22 00:35:22 crc kubenswrapper[4829]: I0122 00:35:22.559116 4829 scope.go:117] "RemoveContainer" containerID="eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" Jan 22 00:35:22 crc kubenswrapper[4829]: E0122 00:35:22.559690 4829 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:35:23 crc kubenswrapper[4829]: I0122 00:35:23.933259 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-dq7x7_a6e6ea79-b55a-4652-bc1f-4788eb17b6d4/prometheus-operator/0.log" Jan 22 00:35:23 crc kubenswrapper[4829]: I0122 00:35:23.943830 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-666d948cd9-6xt2f_893c6bae-0ad7-4fb7-8f5a-5c5a13d41a3d/prometheus-operator-admission-webhook/0.log" Jan 22 00:35:23 crc kubenswrapper[4829]: I0122 00:35:23.954550 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-666d948cd9-nmvft_3cc8f122-dbed-494d-873d-dde35bf15c60/prometheus-operator-admission-webhook/0.log" Jan 22 00:35:23 crc kubenswrapper[4829]: I0122 00:35:23.973665 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-klpvs_bdb33684-1dcb-41b0-904e-7bcb3aa2d1de/operator/0.log" Jan 22 00:35:23 crc kubenswrapper[4829]: I0122 00:35:23.989643 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-992z9_f8606d02-0830-4b52-80b0-5e30e6003f08/perses-operator/0.log" Jan 22 00:35:33 crc kubenswrapper[4829]: I0122 00:35:33.362933 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-dq7x7_a6e6ea79-b55a-4652-bc1f-4788eb17b6d4/prometheus-operator/0.log" Jan 22 00:35:33 crc kubenswrapper[4829]: I0122 00:35:33.388022 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-666d948cd9-6xt2f_893c6bae-0ad7-4fb7-8f5a-5c5a13d41a3d/prometheus-operator-admission-webhook/0.log" Jan 22 00:35:33 crc kubenswrapper[4829]: I0122 00:35:33.408687 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-666d948cd9-nmvft_3cc8f122-dbed-494d-873d-dde35bf15c60/prometheus-operator-admission-webhook/0.log" Jan 22 00:35:33 crc kubenswrapper[4829]: I0122 00:35:33.434620 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-klpvs_bdb33684-1dcb-41b0-904e-7bcb3aa2d1de/operator/0.log" Jan 22 00:35:33 crc kubenswrapper[4829]: I0122 00:35:33.453193 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-992z9_f8606d02-0830-4b52-80b0-5e30e6003f08/perses-operator/0.log" Jan 22 00:35:33 crc kubenswrapper[4829]: I0122 00:35:33.515689 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-qx4r2_12fe7f95-6902-42ad-82b7-e7162826af10/cert-manager-controller/0.log" Jan 22 00:35:33 crc kubenswrapper[4829]: I0122 00:35:33.532230 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-6rdp8_c88bc26a-1dc4-459d-8e47-86eafb891058/cert-manager-cainjector/0.log" Jan 22 00:35:33 crc 
kubenswrapper[4829]: I0122 00:35:33.541839 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-vwmph_a13fcd99-a750-4831-a905-548b24cccf48/cert-manager-webhook/0.log" Jan 22 00:35:34 crc kubenswrapper[4829]: I0122 00:35:34.065797 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-qx4r2_12fe7f95-6902-42ad-82b7-e7162826af10/cert-manager-controller/0.log" Jan 22 00:35:34 crc kubenswrapper[4829]: I0122 00:35:34.083257 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-6rdp8_c88bc26a-1dc4-459d-8e47-86eafb891058/cert-manager-cainjector/0.log" Jan 22 00:35:34 crc kubenswrapper[4829]: I0122 00:35:34.095056 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-vwmph_a13fcd99-a750-4831-a905-548b24cccf48/cert-manager-webhook/0.log" Jan 22 00:35:34 crc kubenswrapper[4829]: I0122 00:35:34.688937 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-cvqvz_db0366e2-9560-4f1a-949c-66d4ddc09b89/control-plane-machine-set-operator/0.log" Jan 22 00:35:34 crc kubenswrapper[4829]: I0122 00:35:34.700430 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-nxg4z_3916e963-6bce-4316-b02e-98b5565e8615/kube-rbac-proxy/0.log" Jan 22 00:35:34 crc kubenswrapper[4829]: I0122 00:35:34.709150 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-nxg4z_3916e963-6bce-4316-b02e-98b5565e8615/machine-api-operator/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.172306 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_alertmanager-default-0_cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6/alertmanager/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.178885 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_alertmanager-default-0_cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6/config-reloader/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.184346 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_alertmanager-default-0_cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6/oauth-proxy/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.193523 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_alertmanager-default-0_cdb9c9e1-b114-4f4c-8ba0-1e84247a85c6/init-config-reloader/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.201842 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_curl_25c58ba2-18e2-4b11-998e-dc2e69b18b80/curl/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.210809 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-event-smartgateway-f964b986c-5ln55_8adb7c3d-24d0-462c-a2bb-ff7533df28d6/bridge/2.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.211020 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-event-smartgateway-f964b986c-5ln55_8adb7c3d-24d0-462c-a2bb-ff7533df28d6/bridge/1.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.214789 4829 log.go:25] "Finished parsing log file" 
path="/var/log/pods/service-telemetry_default-cloud1-ceil-event-smartgateway-f964b986c-5ln55_8adb7c3d-24d0-462c-a2bb-ff7533df28d6/sg-core/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.228288 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45_6bff65e7-d317-42b0-92fd-79aa3d4a6d7a/oauth-proxy/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.235274 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45_6bff65e7-d317-42b0-92fd-79aa3d4a6d7a/bridge/2.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.235446 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45_6bff65e7-d317-42b0-92fd-79aa3d4a6d7a/bridge/1.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.239269 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-meter-smartgateway-57948895dc-cgk45_6bff65e7-d317-42b0-92fd-79aa3d4a6d7a/sg-core/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.248157 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-event-smartgateway-86889d79-n55rd_975d0f01-de8e-402c-aa4b-f582673781e9/bridge/2.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.248486 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-event-smartgateway-86889d79-n55rd_975d0f01-de8e-402c-aa4b-f582673781e9/bridge/1.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.252366 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-event-smartgateway-86889d79-n55rd_975d0f01-de8e-402c-aa4b-f582673781e9/sg-core/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.263170 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6_ed4e3e4a-9af6-470a-9f31-9bb2f31e166d/oauth-proxy/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.269504 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6_ed4e3e4a-9af6-470a-9f31-9bb2f31e166d/bridge/2.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.270399 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6_ed4e3e4a-9af6-470a-9f31-9bb2f31e166d/bridge/1.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.274813 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-meter-smartgateway-7cd87f9766-z56k6_ed4e3e4a-9af6-470a-9f31-9bb2f31e166d/sg-core/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.286530 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf_e0451c51-c962-4cad-88e1-4e58c936f3c4/oauth-proxy/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.295892 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf_e0451c51-c962-4cad-88e1-4e58c936f3c4/bridge/1.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.296326 4829 log.go:25] "Finished parsing log file" 
path="/var/log/pods/service-telemetry_default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf_e0451c51-c962-4cad-88e1-4e58c936f3c4/bridge/2.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.300952 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-sens-meter-smartgateway-5759b4d97-wjndf_e0451c51-c962-4cad-88e1-4e58c936f3c4/sg-core/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.329865 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-interconnect-68864d46cb-ccnh9_3ac09cb4-04ec-4360-90c2-5e3d7cb81b87/default-interconnect/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.337384 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-6856cfb745-qxqkv_956fb702-682a-4908-9ce8-4572084b0d4e/prometheus-webhook-snmp/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.362932 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_elastic-operator-754d769d7d-ngl5t_6e0a5463-b4ff-4f3c-96e8-a041af2f985c/manager/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.384574 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_elasticsearch-es-default-0_39d65618-bed4-44b9-8893-94d1381c5421/elasticsearch/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.390690 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_elasticsearch-es-default-0_39d65618-bed4-44b9-8893-94d1381c5421/elastic-internal-init-filesystem/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.396750 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_elasticsearch-es-default-0_39d65618-bed4-44b9-8893-94d1381c5421/elastic-internal-suspend/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.407077 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_interconnect-operator-5bb49f789d-28w49_e7f04bc7-8fec-45a1-8d01-cbff2015ceda/interconnect-operator/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.418843 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-default-0_9f9e74a3-14d1-45b0-9c9f-1fa93676002a/prometheus/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.424873 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-default-0_9f9e74a3-14d1-45b0-9c9f-1fa93676002a/config-reloader/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.431074 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-default-0_9f9e74a3-14d1-45b0-9c9f-1fa93676002a/oauth-proxy/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.436722 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-default-0_9f9e74a3-14d1-45b0-9c9f-1fa93676002a/init-config-reloader/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.466569 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-webhook-snmp-2-build_391f97ab-a0ff-4185-bf21-e9563069d026/docker-build/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.471559 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-webhook-snmp-2-build_391f97ab-a0ff-4185-bf21-e9563069d026/git-clone/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.477078 4829 log.go:25] "Finished 
parsing log file" path="/var/log/pods/service-telemetry_prometheus-webhook-snmp-2-build_391f97ab-a0ff-4185-bf21-e9563069d026/manage-dockerfile/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.489775 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_qdr-test_a7e55abd-fc7a-4fbb-a99d-695e2287e933/qdr/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.545270 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-4-build_8fceabc2-5259-476d-9079-8e2b72ab18ed/docker-build/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.550758 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-4-build_8fceabc2-5259-476d-9079-8e2b72ab18ed/git-clone/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.553444 4829 scope.go:117] "RemoveContainer" containerID="eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" Jan 22 00:35:35 crc kubenswrapper[4829]: E0122 00:35:35.553758 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.557614 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-4-build_8fceabc2-5259-476d-9079-8e2b72ab18ed/manage-dockerfile/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.743821 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-79bb6b48f5-hsqj9_e34ba979-1b71-49d7-9dab-0aac0e390ff1/operator/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.789975 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-bridge-2-build_bee7053e-10b4-4b1b-9ff2-1a96e8814a26/docker-build/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.794910 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-bridge-2-build_bee7053e-10b4-4b1b-9ff2-1a96e8814a26/git-clone/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.802138 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-bridge-2-build_bee7053e-10b4-4b1b-9ff2-1a96e8814a26/manage-dockerfile/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.841016 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-core-2-build_1d1c4c78-5e7d-4ed5-b394-73698d1d02d1/docker-build/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.845855 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-core-2-build_1d1c4c78-5e7d-4ed5-b394-73698d1d02d1/git-clone/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.851396 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_sg-core-2-build_1d1c4c78-5e7d-4ed5-b394-73698d1d02d1/manage-dockerfile/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.905768 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-2-build_b8fd44cb-8191-4b46-8260-1bc8aa31fe6d/docker-build/0.log" Jan 22 00:35:35 crc 
kubenswrapper[4829]: I0122 00:35:35.913507 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-2-build_b8fd44cb-8191-4b46-8260-1bc8aa31fe6d/git-clone/0.log" Jan 22 00:35:35 crc kubenswrapper[4829]: I0122 00:35:35.921384 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-2-build_b8fd44cb-8191-4b46-8260-1bc8aa31fe6d/manage-dockerfile/0.log" Jan 22 00:35:39 crc kubenswrapper[4829]: I0122 00:35:39.044516 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-74c8dcd6b5-k6w8z_ea1e9cc4-4807-47c7-926f-2e9ff44b6f22/operator/0.log" Jan 22 00:35:39 crc kubenswrapper[4829]: I0122 00:35:39.066874 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_stf-smoketest-smoke1-nkvjg_dc82dca4-55c8-4dba-8b80-8c8934cb3bff/smoketest-collectd/0.log" Jan 22 00:35:39 crc kubenswrapper[4829]: I0122 00:35:39.072826 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_stf-smoketest-smoke1-nkvjg_dc82dca4-55c8-4dba-8b80-8c8934cb3bff/smoketest-ceilometer/0.log" Jan 22 00:35:40 crc kubenswrapper[4829]: I0122 00:35:40.576984 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4ss4n_60f879f6-8b21-4e75-9a62-d372fec048e1/kube-multus/3.log" Jan 22 00:35:40 crc kubenswrapper[4829]: I0122 00:35:40.621116 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4ss4n_60f879f6-8b21-4e75-9a62-d372fec048e1/kube-multus/2.log" Jan 22 00:35:40 crc kubenswrapper[4829]: I0122 00:35:40.633186 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-v62gj_257dfafb-8d80-4de2-97e5-96df6b004a43/kube-multus-additional-cni-plugins/0.log" Jan 22 00:35:40 crc kubenswrapper[4829]: I0122 00:35:40.640131 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-v62gj_257dfafb-8d80-4de2-97e5-96df6b004a43/egress-router-binary-copy/0.log" Jan 22 00:35:40 crc kubenswrapper[4829]: I0122 00:35:40.647637 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-v62gj_257dfafb-8d80-4de2-97e5-96df6b004a43/cni-plugins/0.log" Jan 22 00:35:40 crc kubenswrapper[4829]: I0122 00:35:40.654013 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-v62gj_257dfafb-8d80-4de2-97e5-96df6b004a43/bond-cni-plugin/0.log" Jan 22 00:35:40 crc kubenswrapper[4829]: I0122 00:35:40.660952 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-v62gj_257dfafb-8d80-4de2-97e5-96df6b004a43/routeoverride-cni/0.log" Jan 22 00:35:40 crc kubenswrapper[4829]: I0122 00:35:40.666270 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-v62gj_257dfafb-8d80-4de2-97e5-96df6b004a43/whereabouts-cni-bincopy/0.log" Jan 22 00:35:40 crc kubenswrapper[4829]: I0122 00:35:40.673134 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-v62gj_257dfafb-8d80-4de2-97e5-96df6b004a43/whereabouts-cni/0.log" Jan 22 00:35:40 crc kubenswrapper[4829]: I0122 00:35:40.682933 4829 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-multus_multus-admission-controller-857f4d67dd-6l5hd_7fa648de-a4e5-4e1d-8bbb-d84d5409f0b2/multus-admission-controller/0.log" Jan 22 00:35:40 crc kubenswrapper[4829]: I0122 00:35:40.688299 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-admission-controller-857f4d67dd-6l5hd_7fa648de-a4e5-4e1d-8bbb-d84d5409f0b2/kube-rbac-proxy/0.log" Jan 22 00:35:40 crc kubenswrapper[4829]: I0122 00:35:40.708241 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_network-metrics-daemon-c82dd_74beaade-c8f6-4d34-842b-1c03fe72b195/network-metrics-daemon/0.log" Jan 22 00:35:40 crc kubenswrapper[4829]: I0122 00:35:40.713636 4829 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_network-metrics-daemon-c82dd_74beaade-c8f6-4d34-842b-1c03fe72b195/kube-rbac-proxy/0.log" Jan 22 00:35:49 crc kubenswrapper[4829]: I0122 00:35:49.554389 4829 scope.go:117] "RemoveContainer" containerID="eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" Jan 22 00:35:49 crc kubenswrapper[4829]: E0122 00:35:49.555727 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:36:01 crc kubenswrapper[4829]: I0122 00:36:01.553360 4829 scope.go:117] "RemoveContainer" containerID="eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" Jan 22 00:36:01 crc kubenswrapper[4829]: E0122 00:36:01.554025 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:36:16 crc kubenswrapper[4829]: I0122 00:36:16.553814 4829 scope.go:117] "RemoveContainer" containerID="eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" Jan 22 00:36:16 crc kubenswrapper[4829]: E0122 00:36:16.554751 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:36:30 crc kubenswrapper[4829]: I0122 00:36:30.554599 4829 scope.go:117] "RemoveContainer" containerID="eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" Jan 22 00:36:30 crc kubenswrapper[4829]: E0122 00:36:30.555181 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" 
podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:36:43 crc kubenswrapper[4829]: I0122 00:36:43.553860 4829 scope.go:117] "RemoveContainer" containerID="eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" Jan 22 00:36:43 crc kubenswrapper[4829]: E0122 00:36:43.555249 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:36:58 crc kubenswrapper[4829]: I0122 00:36:58.553218 4829 scope.go:117] "RemoveContainer" containerID="eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" Jan 22 00:36:58 crc kubenswrapper[4829]: E0122 00:36:58.553858 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:37:10 crc kubenswrapper[4829]: I0122 00:37:10.553841 4829 scope.go:117] "RemoveContainer" containerID="eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" Jan 22 00:37:10 crc kubenswrapper[4829]: E0122 00:37:10.554955 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:37:21 crc kubenswrapper[4829]: I0122 00:37:21.554018 4829 scope.go:117] "RemoveContainer" containerID="eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" Jan 22 00:37:21 crc kubenswrapper[4829]: E0122 00:37:21.554883 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:37:34 crc kubenswrapper[4829]: I0122 00:37:34.554717 4829 scope.go:117] "RemoveContainer" containerID="eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" Jan 22 00:37:34 crc kubenswrapper[4829]: E0122 00:37:34.555643 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:37:48 crc kubenswrapper[4829]: I0122 00:37:48.553755 4829 scope.go:117] "RemoveContainer" 
containerID="eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" Jan 22 00:37:48 crc kubenswrapper[4829]: E0122 00:37:48.554329 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:38:00 crc kubenswrapper[4829]: I0122 00:38:00.558601 4829 scope.go:117] "RemoveContainer" containerID="eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" Jan 22 00:38:00 crc kubenswrapper[4829]: E0122 00:38:00.559796 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:38:11 crc kubenswrapper[4829]: I0122 00:38:11.554012 4829 scope.go:117] "RemoveContainer" containerID="eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" Jan 22 00:38:12 crc kubenswrapper[4829]: I0122 00:38:12.591087 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerStarted","Data":"9bb5f6aa7ec6668e87a354e30c1c293084ca01252e6b4dae22249ac4c707f10f"} Jan 22 00:40:34 crc kubenswrapper[4829]: I0122 00:40:34.658571 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:40:34 crc kubenswrapper[4829]: I0122 00:40:34.659100 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:41:04 crc kubenswrapper[4829]: I0122 00:41:04.659411 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:41:04 crc kubenswrapper[4829]: I0122 00:41:04.660015 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:41:34 crc kubenswrapper[4829]: I0122 00:41:34.659089 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:41:34 crc kubenswrapper[4829]: I0122 00:41:34.659806 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:41:34 crc kubenswrapper[4829]: I0122 00:41:34.659986 4829 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 00:41:34 crc kubenswrapper[4829]: I0122 00:41:34.661114 4829 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9bb5f6aa7ec6668e87a354e30c1c293084ca01252e6b4dae22249ac4c707f10f"} pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 00:41:34 crc kubenswrapper[4829]: I0122 00:41:34.661223 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" containerID="cri-o://9bb5f6aa7ec6668e87a354e30c1c293084ca01252e6b4dae22249ac4c707f10f" gracePeriod=600 Jan 22 00:41:35 crc kubenswrapper[4829]: I0122 00:41:35.310776 4829 generic.go:334] "Generic (PLEG): container finished" podID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerID="9bb5f6aa7ec6668e87a354e30c1c293084ca01252e6b4dae22249ac4c707f10f" exitCode=0 Jan 22 00:41:35 crc kubenswrapper[4829]: I0122 00:41:35.311309 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerDied","Data":"9bb5f6aa7ec6668e87a354e30c1c293084ca01252e6b4dae22249ac4c707f10f"} Jan 22 00:41:35 crc kubenswrapper[4829]: I0122 00:41:35.311936 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerStarted","Data":"41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f"} Jan 22 00:41:35 crc kubenswrapper[4829]: I0122 00:41:35.311968 4829 scope.go:117] "RemoveContainer" containerID="eaa1b519bd00ba76c68bf0d607780944cf2d26092f82e5346ad33e7ecf200061" Jan 22 00:43:34 crc kubenswrapper[4829]: I0122 00:43:34.658565 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:43:34 crc kubenswrapper[4829]: I0122 00:43:34.659604 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:43:38 crc kubenswrapper[4829]: I0122 00:43:38.994955 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-h8vz4"] Jan 22 00:43:39 crc kubenswrapper[4829]: 
I0122 00:43:39.000960 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h8vz4" Jan 22 00:43:39 crc kubenswrapper[4829]: I0122 00:43:39.026483 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h8vz4"] Jan 22 00:43:39 crc kubenswrapper[4829]: I0122 00:43:39.120600 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7fdb6b45-0fac-407c-a585-0bb220b91a02-catalog-content\") pod \"community-operators-h8vz4\" (UID: \"7fdb6b45-0fac-407c-a585-0bb220b91a02\") " pod="openshift-marketplace/community-operators-h8vz4" Jan 22 00:43:39 crc kubenswrapper[4829]: I0122 00:43:39.120655 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pm9mc\" (UniqueName: \"kubernetes.io/projected/7fdb6b45-0fac-407c-a585-0bb220b91a02-kube-api-access-pm9mc\") pod \"community-operators-h8vz4\" (UID: \"7fdb6b45-0fac-407c-a585-0bb220b91a02\") " pod="openshift-marketplace/community-operators-h8vz4" Jan 22 00:43:39 crc kubenswrapper[4829]: I0122 00:43:39.120739 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7fdb6b45-0fac-407c-a585-0bb220b91a02-utilities\") pod \"community-operators-h8vz4\" (UID: \"7fdb6b45-0fac-407c-a585-0bb220b91a02\") " pod="openshift-marketplace/community-operators-h8vz4" Jan 22 00:43:39 crc kubenswrapper[4829]: I0122 00:43:39.199229 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-c656k"] Jan 22 00:43:39 crc kubenswrapper[4829]: I0122 00:43:39.201243 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c656k" Jan 22 00:43:39 crc kubenswrapper[4829]: I0122 00:43:39.214198 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c656k"] Jan 22 00:43:39 crc kubenswrapper[4829]: I0122 00:43:39.222283 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7fdb6b45-0fac-407c-a585-0bb220b91a02-utilities\") pod \"community-operators-h8vz4\" (UID: \"7fdb6b45-0fac-407c-a585-0bb220b91a02\") " pod="openshift-marketplace/community-operators-h8vz4" Jan 22 00:43:39 crc kubenswrapper[4829]: I0122 00:43:39.222417 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7fdb6b45-0fac-407c-a585-0bb220b91a02-catalog-content\") pod \"community-operators-h8vz4\" (UID: \"7fdb6b45-0fac-407c-a585-0bb220b91a02\") " pod="openshift-marketplace/community-operators-h8vz4" Jan 22 00:43:39 crc kubenswrapper[4829]: I0122 00:43:39.222454 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pm9mc\" (UniqueName: \"kubernetes.io/projected/7fdb6b45-0fac-407c-a585-0bb220b91a02-kube-api-access-pm9mc\") pod \"community-operators-h8vz4\" (UID: \"7fdb6b45-0fac-407c-a585-0bb220b91a02\") " pod="openshift-marketplace/community-operators-h8vz4" Jan 22 00:43:39 crc kubenswrapper[4829]: I0122 00:43:39.222966 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7fdb6b45-0fac-407c-a585-0bb220b91a02-utilities\") pod \"community-operators-h8vz4\" (UID: \"7fdb6b45-0fac-407c-a585-0bb220b91a02\") " pod="openshift-marketplace/community-operators-h8vz4" Jan 22 00:43:39 crc kubenswrapper[4829]: I0122 00:43:39.223195 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7fdb6b45-0fac-407c-a585-0bb220b91a02-catalog-content\") pod \"community-operators-h8vz4\" (UID: \"7fdb6b45-0fac-407c-a585-0bb220b91a02\") " pod="openshift-marketplace/community-operators-h8vz4" Jan 22 00:43:39 crc kubenswrapper[4829]: I0122 00:43:39.246205 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pm9mc\" (UniqueName: \"kubernetes.io/projected/7fdb6b45-0fac-407c-a585-0bb220b91a02-kube-api-access-pm9mc\") pod \"community-operators-h8vz4\" (UID: \"7fdb6b45-0fac-407c-a585-0bb220b91a02\") " pod="openshift-marketplace/community-operators-h8vz4" Jan 22 00:43:39 crc kubenswrapper[4829]: I0122 00:43:39.323384 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c7a996d-6203-417f-a847-ba550650f6fb-catalog-content\") pod \"certified-operators-c656k\" (UID: \"9c7a996d-6203-417f-a847-ba550650f6fb\") " pod="openshift-marketplace/certified-operators-c656k" Jan 22 00:43:39 crc kubenswrapper[4829]: I0122 00:43:39.323775 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ck5b4\" (UniqueName: \"kubernetes.io/projected/9c7a996d-6203-417f-a847-ba550650f6fb-kube-api-access-ck5b4\") pod \"certified-operators-c656k\" (UID: \"9c7a996d-6203-417f-a847-ba550650f6fb\") " pod="openshift-marketplace/certified-operators-c656k" Jan 22 00:43:39 crc kubenswrapper[4829]: I0122 00:43:39.323901 4829 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c7a996d-6203-417f-a847-ba550650f6fb-utilities\") pod \"certified-operators-c656k\" (UID: \"9c7a996d-6203-417f-a847-ba550650f6fb\") " pod="openshift-marketplace/certified-operators-c656k" Jan 22 00:43:39 crc kubenswrapper[4829]: I0122 00:43:39.336212 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h8vz4" Jan 22 00:43:39 crc kubenswrapper[4829]: I0122 00:43:39.425725 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ck5b4\" (UniqueName: \"kubernetes.io/projected/9c7a996d-6203-417f-a847-ba550650f6fb-kube-api-access-ck5b4\") pod \"certified-operators-c656k\" (UID: \"9c7a996d-6203-417f-a847-ba550650f6fb\") " pod="openshift-marketplace/certified-operators-c656k" Jan 22 00:43:39 crc kubenswrapper[4829]: I0122 00:43:39.425790 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c7a996d-6203-417f-a847-ba550650f6fb-utilities\") pod \"certified-operators-c656k\" (UID: \"9c7a996d-6203-417f-a847-ba550650f6fb\") " pod="openshift-marketplace/certified-operators-c656k" Jan 22 00:43:39 crc kubenswrapper[4829]: I0122 00:43:39.425818 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c7a996d-6203-417f-a847-ba550650f6fb-catalog-content\") pod \"certified-operators-c656k\" (UID: \"9c7a996d-6203-417f-a847-ba550650f6fb\") " pod="openshift-marketplace/certified-operators-c656k" Jan 22 00:43:39 crc kubenswrapper[4829]: I0122 00:43:39.435483 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c7a996d-6203-417f-a847-ba550650f6fb-utilities\") pod \"certified-operators-c656k\" (UID: \"9c7a996d-6203-417f-a847-ba550650f6fb\") " pod="openshift-marketplace/certified-operators-c656k" Jan 22 00:43:39 crc kubenswrapper[4829]: I0122 00:43:39.435708 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c7a996d-6203-417f-a847-ba550650f6fb-catalog-content\") pod \"certified-operators-c656k\" (UID: \"9c7a996d-6203-417f-a847-ba550650f6fb\") " pod="openshift-marketplace/certified-operators-c656k" Jan 22 00:43:39 crc kubenswrapper[4829]: I0122 00:43:39.519719 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ck5b4\" (UniqueName: \"kubernetes.io/projected/9c7a996d-6203-417f-a847-ba550650f6fb-kube-api-access-ck5b4\") pod \"certified-operators-c656k\" (UID: \"9c7a996d-6203-417f-a847-ba550650f6fb\") " pod="openshift-marketplace/certified-operators-c656k" Jan 22 00:43:39 crc kubenswrapper[4829]: I0122 00:43:39.586153 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c656k" Jan 22 00:43:39 crc kubenswrapper[4829]: I0122 00:43:39.698732 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h8vz4"] Jan 22 00:43:39 crc kubenswrapper[4829]: W0122 00:43:39.725745 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7fdb6b45_0fac_407c_a585_0bb220b91a02.slice/crio-66375862d7445bc15ea4b83222c1a8d4191651ba4b0aca3d6a4a1389c621d551 WatchSource:0}: Error finding container 66375862d7445bc15ea4b83222c1a8d4191651ba4b0aca3d6a4a1389c621d551: Status 404 returned error can't find the container with id 66375862d7445bc15ea4b83222c1a8d4191651ba4b0aca3d6a4a1389c621d551 Jan 22 00:43:40 crc kubenswrapper[4829]: I0122 00:43:40.137181 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c656k"] Jan 22 00:43:40 crc kubenswrapper[4829]: W0122 00:43:40.141739 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9c7a996d_6203_417f_a847_ba550650f6fb.slice/crio-fdc3223e706d0dda4d164940a94191a110453f458e823f670eaca47c2bde8477 WatchSource:0}: Error finding container fdc3223e706d0dda4d164940a94191a110453f458e823f670eaca47c2bde8477: Status 404 returned error can't find the container with id fdc3223e706d0dda4d164940a94191a110453f458e823f670eaca47c2bde8477 Jan 22 00:43:40 crc kubenswrapper[4829]: I0122 00:43:40.471967 4829 generic.go:334] "Generic (PLEG): container finished" podID="7fdb6b45-0fac-407c-a585-0bb220b91a02" containerID="fe69496edc85905ebf2c067336a59eacc2c6e9aae7a98a5c3b3604efbd639c00" exitCode=0 Jan 22 00:43:40 crc kubenswrapper[4829]: I0122 00:43:40.472057 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h8vz4" event={"ID":"7fdb6b45-0fac-407c-a585-0bb220b91a02","Type":"ContainerDied","Data":"fe69496edc85905ebf2c067336a59eacc2c6e9aae7a98a5c3b3604efbd639c00"} Jan 22 00:43:40 crc kubenswrapper[4829]: I0122 00:43:40.472602 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h8vz4" event={"ID":"7fdb6b45-0fac-407c-a585-0bb220b91a02","Type":"ContainerStarted","Data":"66375862d7445bc15ea4b83222c1a8d4191651ba4b0aca3d6a4a1389c621d551"} Jan 22 00:43:40 crc kubenswrapper[4829]: I0122 00:43:40.474484 4829 generic.go:334] "Generic (PLEG): container finished" podID="9c7a996d-6203-417f-a847-ba550650f6fb" containerID="be8be10503fd0cf38dd36d2e51d7eabb1cb60ea7187b02400a3252bf9d708127" exitCode=0 Jan 22 00:43:40 crc kubenswrapper[4829]: I0122 00:43:40.474553 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c656k" event={"ID":"9c7a996d-6203-417f-a847-ba550650f6fb","Type":"ContainerDied","Data":"be8be10503fd0cf38dd36d2e51d7eabb1cb60ea7187b02400a3252bf9d708127"} Jan 22 00:43:40 crc kubenswrapper[4829]: I0122 00:43:40.474584 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c656k" event={"ID":"9c7a996d-6203-417f-a847-ba550650f6fb","Type":"ContainerStarted","Data":"fdc3223e706d0dda4d164940a94191a110453f458e823f670eaca47c2bde8477"} Jan 22 00:43:40 crc kubenswrapper[4829]: I0122 00:43:40.477235 4829 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 00:43:41 crc kubenswrapper[4829]: I0122 00:43:41.413100 4829 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vdjzg"] Jan 22 00:43:41 crc kubenswrapper[4829]: I0122 00:43:41.414979 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vdjzg" Jan 22 00:43:41 crc kubenswrapper[4829]: I0122 00:43:41.425764 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vdjzg"] Jan 22 00:43:41 crc kubenswrapper[4829]: I0122 00:43:41.477018 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7knpf\" (UniqueName: \"kubernetes.io/projected/36463693-e87e-4760-9d56-d5c389ac7f5e-kube-api-access-7knpf\") pod \"redhat-operators-vdjzg\" (UID: \"36463693-e87e-4760-9d56-d5c389ac7f5e\") " pod="openshift-marketplace/redhat-operators-vdjzg" Jan 22 00:43:41 crc kubenswrapper[4829]: I0122 00:43:41.477122 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36463693-e87e-4760-9d56-d5c389ac7f5e-catalog-content\") pod \"redhat-operators-vdjzg\" (UID: \"36463693-e87e-4760-9d56-d5c389ac7f5e\") " pod="openshift-marketplace/redhat-operators-vdjzg" Jan 22 00:43:41 crc kubenswrapper[4829]: I0122 00:43:41.477159 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36463693-e87e-4760-9d56-d5c389ac7f5e-utilities\") pod \"redhat-operators-vdjzg\" (UID: \"36463693-e87e-4760-9d56-d5c389ac7f5e\") " pod="openshift-marketplace/redhat-operators-vdjzg" Jan 22 00:43:41 crc kubenswrapper[4829]: I0122 00:43:41.481976 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h8vz4" event={"ID":"7fdb6b45-0fac-407c-a585-0bb220b91a02","Type":"ContainerStarted","Data":"5bc2273969b2b77b578ad8a92b34b8f0ad613ecc35ea8de9875cf303376844ac"} Jan 22 00:43:41 crc kubenswrapper[4829]: I0122 00:43:41.484330 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c656k" event={"ID":"9c7a996d-6203-417f-a847-ba550650f6fb","Type":"ContainerStarted","Data":"c766f26d21d93fb776abda5b9a1231ed1049ed196703b43e298c197c37019a12"} Jan 22 00:43:41 crc kubenswrapper[4829]: I0122 00:43:41.578802 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36463693-e87e-4760-9d56-d5c389ac7f5e-utilities\") pod \"redhat-operators-vdjzg\" (UID: \"36463693-e87e-4760-9d56-d5c389ac7f5e\") " pod="openshift-marketplace/redhat-operators-vdjzg" Jan 22 00:43:41 crc kubenswrapper[4829]: I0122 00:43:41.578868 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7knpf\" (UniqueName: \"kubernetes.io/projected/36463693-e87e-4760-9d56-d5c389ac7f5e-kube-api-access-7knpf\") pod \"redhat-operators-vdjzg\" (UID: \"36463693-e87e-4760-9d56-d5c389ac7f5e\") " pod="openshift-marketplace/redhat-operators-vdjzg" Jan 22 00:43:41 crc kubenswrapper[4829]: I0122 00:43:41.579000 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36463693-e87e-4760-9d56-d5c389ac7f5e-catalog-content\") pod \"redhat-operators-vdjzg\" (UID: \"36463693-e87e-4760-9d56-d5c389ac7f5e\") " pod="openshift-marketplace/redhat-operators-vdjzg" Jan 22 00:43:41 crc kubenswrapper[4829]: I0122 
00:43:41.579508 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36463693-e87e-4760-9d56-d5c389ac7f5e-catalog-content\") pod \"redhat-operators-vdjzg\" (UID: \"36463693-e87e-4760-9d56-d5c389ac7f5e\") " pod="openshift-marketplace/redhat-operators-vdjzg" Jan 22 00:43:41 crc kubenswrapper[4829]: I0122 00:43:41.579673 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36463693-e87e-4760-9d56-d5c389ac7f5e-utilities\") pod \"redhat-operators-vdjzg\" (UID: \"36463693-e87e-4760-9d56-d5c389ac7f5e\") " pod="openshift-marketplace/redhat-operators-vdjzg" Jan 22 00:43:41 crc kubenswrapper[4829]: I0122 00:43:41.609831 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7knpf\" (UniqueName: \"kubernetes.io/projected/36463693-e87e-4760-9d56-d5c389ac7f5e-kube-api-access-7knpf\") pod \"redhat-operators-vdjzg\" (UID: \"36463693-e87e-4760-9d56-d5c389ac7f5e\") " pod="openshift-marketplace/redhat-operators-vdjzg" Jan 22 00:43:41 crc kubenswrapper[4829]: I0122 00:43:41.733066 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vdjzg" Jan 22 00:43:42 crc kubenswrapper[4829]: I0122 00:43:42.223741 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vdjzg"] Jan 22 00:43:42 crc kubenswrapper[4829]: I0122 00:43:42.494925 4829 generic.go:334] "Generic (PLEG): container finished" podID="36463693-e87e-4760-9d56-d5c389ac7f5e" containerID="9079aa80d91e45ea296078c40a16fd4043e21bc1b742606a3ec38e89d5c7df42" exitCode=0 Jan 22 00:43:42 crc kubenswrapper[4829]: I0122 00:43:42.495086 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vdjzg" event={"ID":"36463693-e87e-4760-9d56-d5c389ac7f5e","Type":"ContainerDied","Data":"9079aa80d91e45ea296078c40a16fd4043e21bc1b742606a3ec38e89d5c7df42"} Jan 22 00:43:42 crc kubenswrapper[4829]: I0122 00:43:42.495381 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vdjzg" event={"ID":"36463693-e87e-4760-9d56-d5c389ac7f5e","Type":"ContainerStarted","Data":"c07e24302e43a51cbe1fc8068197eaf5e4879d1a00b7cfc3e1e192ca86153da6"} Jan 22 00:43:42 crc kubenswrapper[4829]: I0122 00:43:42.501815 4829 generic.go:334] "Generic (PLEG): container finished" podID="7fdb6b45-0fac-407c-a585-0bb220b91a02" containerID="5bc2273969b2b77b578ad8a92b34b8f0ad613ecc35ea8de9875cf303376844ac" exitCode=0 Jan 22 00:43:42 crc kubenswrapper[4829]: I0122 00:43:42.501857 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h8vz4" event={"ID":"7fdb6b45-0fac-407c-a585-0bb220b91a02","Type":"ContainerDied","Data":"5bc2273969b2b77b578ad8a92b34b8f0ad613ecc35ea8de9875cf303376844ac"} Jan 22 00:43:42 crc kubenswrapper[4829]: I0122 00:43:42.504391 4829 generic.go:334] "Generic (PLEG): container finished" podID="9c7a996d-6203-417f-a847-ba550650f6fb" containerID="c766f26d21d93fb776abda5b9a1231ed1049ed196703b43e298c197c37019a12" exitCode=0 Jan 22 00:43:42 crc kubenswrapper[4829]: I0122 00:43:42.504432 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c656k" event={"ID":"9c7a996d-6203-417f-a847-ba550650f6fb","Type":"ContainerDied","Data":"c766f26d21d93fb776abda5b9a1231ed1049ed196703b43e298c197c37019a12"} Jan 22 00:43:43 crc kubenswrapper[4829]: 
I0122 00:43:43.514866 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h8vz4" event={"ID":"7fdb6b45-0fac-407c-a585-0bb220b91a02","Type":"ContainerStarted","Data":"cc362bdbffecfb126773995dc2d8d9b9324dba5e569e6929a973e7bc39154fd4"} Jan 22 00:43:43 crc kubenswrapper[4829]: I0122 00:43:43.518654 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c656k" event={"ID":"9c7a996d-6203-417f-a847-ba550650f6fb","Type":"ContainerStarted","Data":"150427ee5d46899deb573f100689850811c5ad133d419946610e55500117358d"} Jan 22 00:43:43 crc kubenswrapper[4829]: I0122 00:43:43.521409 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vdjzg" event={"ID":"36463693-e87e-4760-9d56-d5c389ac7f5e","Type":"ContainerStarted","Data":"39e1bf5b50210fad8136855eff651160782f5cf367caef8298556fa4c9d30c19"} Jan 22 00:43:43 crc kubenswrapper[4829]: I0122 00:43:43.567373 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-h8vz4" podStartSLOduration=3.120211064 podStartE2EDuration="5.567354678s" podCreationTimestamp="2026-01-22 00:43:38 +0000 UTC" firstStartedPulling="2026-01-22 00:43:40.476952017 +0000 UTC m=+2198.513193929" lastFinishedPulling="2026-01-22 00:43:42.924095631 +0000 UTC m=+2200.960337543" observedRunningTime="2026-01-22 00:43:43.545187114 +0000 UTC m=+2201.581429036" watchObservedRunningTime="2026-01-22 00:43:43.567354678 +0000 UTC m=+2201.603596590" Jan 22 00:43:43 crc kubenswrapper[4829]: I0122 00:43:43.592062 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-c656k" podStartSLOduration=2.173251063 podStartE2EDuration="4.59203242s" podCreationTimestamp="2026-01-22 00:43:39 +0000 UTC" firstStartedPulling="2026-01-22 00:43:40.479161815 +0000 UTC m=+2198.515403727" lastFinishedPulling="2026-01-22 00:43:42.897943172 +0000 UTC m=+2200.934185084" observedRunningTime="2026-01-22 00:43:43.584054154 +0000 UTC m=+2201.620296076" watchObservedRunningTime="2026-01-22 00:43:43.59203242 +0000 UTC m=+2201.628274372" Jan 22 00:43:48 crc kubenswrapper[4829]: I0122 00:43:48.566594 4829 generic.go:334] "Generic (PLEG): container finished" podID="36463693-e87e-4760-9d56-d5c389ac7f5e" containerID="39e1bf5b50210fad8136855eff651160782f5cf367caef8298556fa4c9d30c19" exitCode=0 Jan 22 00:43:48 crc kubenswrapper[4829]: I0122 00:43:48.566660 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vdjzg" event={"ID":"36463693-e87e-4760-9d56-d5c389ac7f5e","Type":"ContainerDied","Data":"39e1bf5b50210fad8136855eff651160782f5cf367caef8298556fa4c9d30c19"} Jan 22 00:43:49 crc kubenswrapper[4829]: I0122 00:43:49.336774 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-h8vz4" Jan 22 00:43:49 crc kubenswrapper[4829]: I0122 00:43:49.336872 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-h8vz4" Jan 22 00:43:49 crc kubenswrapper[4829]: I0122 00:43:49.403921 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-h8vz4" Jan 22 00:43:49 crc kubenswrapper[4829]: I0122 00:43:49.576995 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vdjzg" 
event={"ID":"36463693-e87e-4760-9d56-d5c389ac7f5e","Type":"ContainerStarted","Data":"c38b740209e92103e75b02061aeb05658f5b2cfe54812a384240dfcf1f3fcf99"} Jan 22 00:43:49 crc kubenswrapper[4829]: I0122 00:43:49.586959 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-c656k" Jan 22 00:43:49 crc kubenswrapper[4829]: I0122 00:43:49.586997 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-c656k" Jan 22 00:43:49 crc kubenswrapper[4829]: I0122 00:43:49.602665 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vdjzg" podStartSLOduration=1.844805845 podStartE2EDuration="8.602639974s" podCreationTimestamp="2026-01-22 00:43:41 +0000 UTC" firstStartedPulling="2026-01-22 00:43:42.497786853 +0000 UTC m=+2200.534028765" lastFinishedPulling="2026-01-22 00:43:49.255620982 +0000 UTC m=+2207.291862894" observedRunningTime="2026-01-22 00:43:49.597009059 +0000 UTC m=+2207.633250981" watchObservedRunningTime="2026-01-22 00:43:49.602639974 +0000 UTC m=+2207.638881896" Jan 22 00:43:49 crc kubenswrapper[4829]: I0122 00:43:49.630722 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-h8vz4" Jan 22 00:43:49 crc kubenswrapper[4829]: I0122 00:43:49.644508 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-c656k" Jan 22 00:43:50 crc kubenswrapper[4829]: I0122 00:43:50.645585 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-c656k" Jan 22 00:43:51 crc kubenswrapper[4829]: I0122 00:43:51.733604 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vdjzg" Jan 22 00:43:51 crc kubenswrapper[4829]: I0122 00:43:51.734811 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vdjzg" Jan 22 00:43:51 crc kubenswrapper[4829]: I0122 00:43:51.986646 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c656k"] Jan 22 00:43:52 crc kubenswrapper[4829]: I0122 00:43:52.605786 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-c656k" podUID="9c7a996d-6203-417f-a847-ba550650f6fb" containerName="registry-server" containerID="cri-o://150427ee5d46899deb573f100689850811c5ad133d419946610e55500117358d" gracePeriod=2 Jan 22 00:43:52 crc kubenswrapper[4829]: I0122 00:43:52.800684 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-vdjzg" podUID="36463693-e87e-4760-9d56-d5c389ac7f5e" containerName="registry-server" probeResult="failure" output=< Jan 22 00:43:52 crc kubenswrapper[4829]: timeout: failed to connect service ":50051" within 1s Jan 22 00:43:52 crc kubenswrapper[4829]: > Jan 22 00:43:52 crc kubenswrapper[4829]: I0122 00:43:52.991380 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c656k" Jan 22 00:43:53 crc kubenswrapper[4829]: I0122 00:43:53.067283 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ck5b4\" (UniqueName: \"kubernetes.io/projected/9c7a996d-6203-417f-a847-ba550650f6fb-kube-api-access-ck5b4\") pod \"9c7a996d-6203-417f-a847-ba550650f6fb\" (UID: \"9c7a996d-6203-417f-a847-ba550650f6fb\") " Jan 22 00:43:53 crc kubenswrapper[4829]: I0122 00:43:53.067607 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c7a996d-6203-417f-a847-ba550650f6fb-catalog-content\") pod \"9c7a996d-6203-417f-a847-ba550650f6fb\" (UID: \"9c7a996d-6203-417f-a847-ba550650f6fb\") " Jan 22 00:43:53 crc kubenswrapper[4829]: I0122 00:43:53.067669 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c7a996d-6203-417f-a847-ba550650f6fb-utilities\") pod \"9c7a996d-6203-417f-a847-ba550650f6fb\" (UID: \"9c7a996d-6203-417f-a847-ba550650f6fb\") " Jan 22 00:43:53 crc kubenswrapper[4829]: I0122 00:43:53.068711 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c7a996d-6203-417f-a847-ba550650f6fb-utilities" (OuterVolumeSpecName: "utilities") pod "9c7a996d-6203-417f-a847-ba550650f6fb" (UID: "9c7a996d-6203-417f-a847-ba550650f6fb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:43:53 crc kubenswrapper[4829]: I0122 00:43:53.078718 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c7a996d-6203-417f-a847-ba550650f6fb-kube-api-access-ck5b4" (OuterVolumeSpecName: "kube-api-access-ck5b4") pod "9c7a996d-6203-417f-a847-ba550650f6fb" (UID: "9c7a996d-6203-417f-a847-ba550650f6fb"). InnerVolumeSpecName "kube-api-access-ck5b4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:43:53 crc kubenswrapper[4829]: I0122 00:43:53.115921 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c7a996d-6203-417f-a847-ba550650f6fb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9c7a996d-6203-417f-a847-ba550650f6fb" (UID: "9c7a996d-6203-417f-a847-ba550650f6fb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:43:53 crc kubenswrapper[4829]: I0122 00:43:53.169336 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ck5b4\" (UniqueName: \"kubernetes.io/projected/9c7a996d-6203-417f-a847-ba550650f6fb-kube-api-access-ck5b4\") on node \"crc\" DevicePath \"\"" Jan 22 00:43:53 crc kubenswrapper[4829]: I0122 00:43:53.169384 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c7a996d-6203-417f-a847-ba550650f6fb-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 00:43:53 crc kubenswrapper[4829]: I0122 00:43:53.169397 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c7a996d-6203-417f-a847-ba550650f6fb-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 00:43:53 crc kubenswrapper[4829]: I0122 00:43:53.617288 4829 generic.go:334] "Generic (PLEG): container finished" podID="9c7a996d-6203-417f-a847-ba550650f6fb" containerID="150427ee5d46899deb573f100689850811c5ad133d419946610e55500117358d" exitCode=0 Jan 22 00:43:53 crc kubenswrapper[4829]: I0122 00:43:53.617345 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c656k" event={"ID":"9c7a996d-6203-417f-a847-ba550650f6fb","Type":"ContainerDied","Data":"150427ee5d46899deb573f100689850811c5ad133d419946610e55500117358d"} Jan 22 00:43:53 crc kubenswrapper[4829]: I0122 00:43:53.617382 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c656k" event={"ID":"9c7a996d-6203-417f-a847-ba550650f6fb","Type":"ContainerDied","Data":"fdc3223e706d0dda4d164940a94191a110453f458e823f670eaca47c2bde8477"} Jan 22 00:43:53 crc kubenswrapper[4829]: I0122 00:43:53.617414 4829 scope.go:117] "RemoveContainer" containerID="150427ee5d46899deb573f100689850811c5ad133d419946610e55500117358d" Jan 22 00:43:53 crc kubenswrapper[4829]: I0122 00:43:53.617424 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c656k" Jan 22 00:43:53 crc kubenswrapper[4829]: I0122 00:43:53.657479 4829 scope.go:117] "RemoveContainer" containerID="c766f26d21d93fb776abda5b9a1231ed1049ed196703b43e298c197c37019a12" Jan 22 00:43:53 crc kubenswrapper[4829]: I0122 00:43:53.692931 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c656k"] Jan 22 00:43:53 crc kubenswrapper[4829]: I0122 00:43:53.706363 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-c656k"] Jan 22 00:43:53 crc kubenswrapper[4829]: I0122 00:43:53.711891 4829 scope.go:117] "RemoveContainer" containerID="be8be10503fd0cf38dd36d2e51d7eabb1cb60ea7187b02400a3252bf9d708127" Jan 22 00:43:53 crc kubenswrapper[4829]: I0122 00:43:53.729913 4829 scope.go:117] "RemoveContainer" containerID="150427ee5d46899deb573f100689850811c5ad133d419946610e55500117358d" Jan 22 00:43:53 crc kubenswrapper[4829]: E0122 00:43:53.742910 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"150427ee5d46899deb573f100689850811c5ad133d419946610e55500117358d\": container with ID starting with 150427ee5d46899deb573f100689850811c5ad133d419946610e55500117358d not found: ID does not exist" containerID="150427ee5d46899deb573f100689850811c5ad133d419946610e55500117358d" Jan 22 00:43:53 crc kubenswrapper[4829]: I0122 00:43:53.743023 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"150427ee5d46899deb573f100689850811c5ad133d419946610e55500117358d"} err="failed to get container status \"150427ee5d46899deb573f100689850811c5ad133d419946610e55500117358d\": rpc error: code = NotFound desc = could not find container \"150427ee5d46899deb573f100689850811c5ad133d419946610e55500117358d\": container with ID starting with 150427ee5d46899deb573f100689850811c5ad133d419946610e55500117358d not found: ID does not exist" Jan 22 00:43:53 crc kubenswrapper[4829]: I0122 00:43:53.743066 4829 scope.go:117] "RemoveContainer" containerID="c766f26d21d93fb776abda5b9a1231ed1049ed196703b43e298c197c37019a12" Jan 22 00:43:53 crc kubenswrapper[4829]: E0122 00:43:53.744030 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c766f26d21d93fb776abda5b9a1231ed1049ed196703b43e298c197c37019a12\": container with ID starting with c766f26d21d93fb776abda5b9a1231ed1049ed196703b43e298c197c37019a12 not found: ID does not exist" containerID="c766f26d21d93fb776abda5b9a1231ed1049ed196703b43e298c197c37019a12" Jan 22 00:43:53 crc kubenswrapper[4829]: I0122 00:43:53.744213 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c766f26d21d93fb776abda5b9a1231ed1049ed196703b43e298c197c37019a12"} err="failed to get container status \"c766f26d21d93fb776abda5b9a1231ed1049ed196703b43e298c197c37019a12\": rpc error: code = NotFound desc = could not find container \"c766f26d21d93fb776abda5b9a1231ed1049ed196703b43e298c197c37019a12\": container with ID starting with c766f26d21d93fb776abda5b9a1231ed1049ed196703b43e298c197c37019a12 not found: ID does not exist" Jan 22 00:43:53 crc kubenswrapper[4829]: I0122 00:43:53.744394 4829 scope.go:117] "RemoveContainer" containerID="be8be10503fd0cf38dd36d2e51d7eabb1cb60ea7187b02400a3252bf9d708127" Jan 22 00:43:53 crc kubenswrapper[4829]: E0122 00:43:53.747439 4829 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"be8be10503fd0cf38dd36d2e51d7eabb1cb60ea7187b02400a3252bf9d708127\": container with ID starting with be8be10503fd0cf38dd36d2e51d7eabb1cb60ea7187b02400a3252bf9d708127 not found: ID does not exist" containerID="be8be10503fd0cf38dd36d2e51d7eabb1cb60ea7187b02400a3252bf9d708127" Jan 22 00:43:53 crc kubenswrapper[4829]: I0122 00:43:53.747481 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be8be10503fd0cf38dd36d2e51d7eabb1cb60ea7187b02400a3252bf9d708127"} err="failed to get container status \"be8be10503fd0cf38dd36d2e51d7eabb1cb60ea7187b02400a3252bf9d708127\": rpc error: code = NotFound desc = could not find container \"be8be10503fd0cf38dd36d2e51d7eabb1cb60ea7187b02400a3252bf9d708127\": container with ID starting with be8be10503fd0cf38dd36d2e51d7eabb1cb60ea7187b02400a3252bf9d708127 not found: ID does not exist" Jan 22 00:43:54 crc kubenswrapper[4829]: I0122 00:43:54.564289 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c7a996d-6203-417f-a847-ba550650f6fb" path="/var/lib/kubelet/pods/9c7a996d-6203-417f-a847-ba550650f6fb/volumes" Jan 22 00:43:54 crc kubenswrapper[4829]: I0122 00:43:54.585418 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-h8vz4"] Jan 22 00:43:54 crc kubenswrapper[4829]: I0122 00:43:54.585969 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-h8vz4" podUID="7fdb6b45-0fac-407c-a585-0bb220b91a02" containerName="registry-server" containerID="cri-o://cc362bdbffecfb126773995dc2d8d9b9324dba5e569e6929a973e7bc39154fd4" gracePeriod=2 Jan 22 00:43:55 crc kubenswrapper[4829]: I0122 00:43:55.042771 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h8vz4" Jan 22 00:43:55 crc kubenswrapper[4829]: I0122 00:43:55.103246 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7fdb6b45-0fac-407c-a585-0bb220b91a02-utilities\") pod \"7fdb6b45-0fac-407c-a585-0bb220b91a02\" (UID: \"7fdb6b45-0fac-407c-a585-0bb220b91a02\") " Jan 22 00:43:55 crc kubenswrapper[4829]: I0122 00:43:55.103310 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pm9mc\" (UniqueName: \"kubernetes.io/projected/7fdb6b45-0fac-407c-a585-0bb220b91a02-kube-api-access-pm9mc\") pod \"7fdb6b45-0fac-407c-a585-0bb220b91a02\" (UID: \"7fdb6b45-0fac-407c-a585-0bb220b91a02\") " Jan 22 00:43:55 crc kubenswrapper[4829]: I0122 00:43:55.103361 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7fdb6b45-0fac-407c-a585-0bb220b91a02-catalog-content\") pod \"7fdb6b45-0fac-407c-a585-0bb220b91a02\" (UID: \"7fdb6b45-0fac-407c-a585-0bb220b91a02\") " Jan 22 00:43:55 crc kubenswrapper[4829]: I0122 00:43:55.105686 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7fdb6b45-0fac-407c-a585-0bb220b91a02-utilities" (OuterVolumeSpecName: "utilities") pod "7fdb6b45-0fac-407c-a585-0bb220b91a02" (UID: "7fdb6b45-0fac-407c-a585-0bb220b91a02"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:43:55 crc kubenswrapper[4829]: I0122 00:43:55.120000 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7fdb6b45-0fac-407c-a585-0bb220b91a02-kube-api-access-pm9mc" (OuterVolumeSpecName: "kube-api-access-pm9mc") pod "7fdb6b45-0fac-407c-a585-0bb220b91a02" (UID: "7fdb6b45-0fac-407c-a585-0bb220b91a02"). InnerVolumeSpecName "kube-api-access-pm9mc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:43:55 crc kubenswrapper[4829]: I0122 00:43:55.177754 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7fdb6b45-0fac-407c-a585-0bb220b91a02-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7fdb6b45-0fac-407c-a585-0bb220b91a02" (UID: "7fdb6b45-0fac-407c-a585-0bb220b91a02"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:43:55 crc kubenswrapper[4829]: I0122 00:43:55.204850 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7fdb6b45-0fac-407c-a585-0bb220b91a02-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 00:43:55 crc kubenswrapper[4829]: I0122 00:43:55.204882 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pm9mc\" (UniqueName: \"kubernetes.io/projected/7fdb6b45-0fac-407c-a585-0bb220b91a02-kube-api-access-pm9mc\") on node \"crc\" DevicePath \"\"" Jan 22 00:43:55 crc kubenswrapper[4829]: I0122 00:43:55.204896 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7fdb6b45-0fac-407c-a585-0bb220b91a02-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 00:43:55 crc kubenswrapper[4829]: I0122 00:43:55.633742 4829 generic.go:334] "Generic (PLEG): container finished" podID="7fdb6b45-0fac-407c-a585-0bb220b91a02" containerID="cc362bdbffecfb126773995dc2d8d9b9324dba5e569e6929a973e7bc39154fd4" exitCode=0 Jan 22 00:43:55 crc kubenswrapper[4829]: I0122 00:43:55.633809 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h8vz4" event={"ID":"7fdb6b45-0fac-407c-a585-0bb220b91a02","Type":"ContainerDied","Data":"cc362bdbffecfb126773995dc2d8d9b9324dba5e569e6929a973e7bc39154fd4"} Jan 22 00:43:55 crc kubenswrapper[4829]: I0122 00:43:55.633834 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-h8vz4" Jan 22 00:43:55 crc kubenswrapper[4829]: I0122 00:43:55.633857 4829 scope.go:117] "RemoveContainer" containerID="cc362bdbffecfb126773995dc2d8d9b9324dba5e569e6929a973e7bc39154fd4" Jan 22 00:43:55 crc kubenswrapper[4829]: I0122 00:43:55.633845 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h8vz4" event={"ID":"7fdb6b45-0fac-407c-a585-0bb220b91a02","Type":"ContainerDied","Data":"66375862d7445bc15ea4b83222c1a8d4191651ba4b0aca3d6a4a1389c621d551"} Jan 22 00:43:55 crc kubenswrapper[4829]: I0122 00:43:55.657168 4829 scope.go:117] "RemoveContainer" containerID="5bc2273969b2b77b578ad8a92b34b8f0ad613ecc35ea8de9875cf303376844ac" Jan 22 00:43:55 crc kubenswrapper[4829]: I0122 00:43:55.685453 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-h8vz4"] Jan 22 00:43:55 crc kubenswrapper[4829]: I0122 00:43:55.687451 4829 scope.go:117] "RemoveContainer" containerID="fe69496edc85905ebf2c067336a59eacc2c6e9aae7a98a5c3b3604efbd639c00" Jan 22 00:43:55 crc kubenswrapper[4829]: I0122 00:43:55.696345 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-h8vz4"] Jan 22 00:43:55 crc kubenswrapper[4829]: I0122 00:43:55.716860 4829 scope.go:117] "RemoveContainer" containerID="cc362bdbffecfb126773995dc2d8d9b9324dba5e569e6929a973e7bc39154fd4" Jan 22 00:43:55 crc kubenswrapper[4829]: E0122 00:43:55.717447 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc362bdbffecfb126773995dc2d8d9b9324dba5e569e6929a973e7bc39154fd4\": container with ID starting with cc362bdbffecfb126773995dc2d8d9b9324dba5e569e6929a973e7bc39154fd4 not found: ID does not exist" containerID="cc362bdbffecfb126773995dc2d8d9b9324dba5e569e6929a973e7bc39154fd4" Jan 22 00:43:55 crc kubenswrapper[4829]: I0122 00:43:55.717481 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc362bdbffecfb126773995dc2d8d9b9324dba5e569e6929a973e7bc39154fd4"} err="failed to get container status \"cc362bdbffecfb126773995dc2d8d9b9324dba5e569e6929a973e7bc39154fd4\": rpc error: code = NotFound desc = could not find container \"cc362bdbffecfb126773995dc2d8d9b9324dba5e569e6929a973e7bc39154fd4\": container with ID starting with cc362bdbffecfb126773995dc2d8d9b9324dba5e569e6929a973e7bc39154fd4 not found: ID does not exist" Jan 22 00:43:55 crc kubenswrapper[4829]: I0122 00:43:55.717504 4829 scope.go:117] "RemoveContainer" containerID="5bc2273969b2b77b578ad8a92b34b8f0ad613ecc35ea8de9875cf303376844ac" Jan 22 00:43:55 crc kubenswrapper[4829]: E0122 00:43:55.717913 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bc2273969b2b77b578ad8a92b34b8f0ad613ecc35ea8de9875cf303376844ac\": container with ID starting with 5bc2273969b2b77b578ad8a92b34b8f0ad613ecc35ea8de9875cf303376844ac not found: ID does not exist" containerID="5bc2273969b2b77b578ad8a92b34b8f0ad613ecc35ea8de9875cf303376844ac" Jan 22 00:43:55 crc kubenswrapper[4829]: I0122 00:43:55.718037 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bc2273969b2b77b578ad8a92b34b8f0ad613ecc35ea8de9875cf303376844ac"} err="failed to get container status \"5bc2273969b2b77b578ad8a92b34b8f0ad613ecc35ea8de9875cf303376844ac\": rpc error: code = NotFound desc = could not find 
container \"5bc2273969b2b77b578ad8a92b34b8f0ad613ecc35ea8de9875cf303376844ac\": container with ID starting with 5bc2273969b2b77b578ad8a92b34b8f0ad613ecc35ea8de9875cf303376844ac not found: ID does not exist" Jan 22 00:43:55 crc kubenswrapper[4829]: I0122 00:43:55.718139 4829 scope.go:117] "RemoveContainer" containerID="fe69496edc85905ebf2c067336a59eacc2c6e9aae7a98a5c3b3604efbd639c00" Jan 22 00:43:55 crc kubenswrapper[4829]: E0122 00:43:55.718646 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe69496edc85905ebf2c067336a59eacc2c6e9aae7a98a5c3b3604efbd639c00\": container with ID starting with fe69496edc85905ebf2c067336a59eacc2c6e9aae7a98a5c3b3604efbd639c00 not found: ID does not exist" containerID="fe69496edc85905ebf2c067336a59eacc2c6e9aae7a98a5c3b3604efbd639c00" Jan 22 00:43:55 crc kubenswrapper[4829]: I0122 00:43:55.718674 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe69496edc85905ebf2c067336a59eacc2c6e9aae7a98a5c3b3604efbd639c00"} err="failed to get container status \"fe69496edc85905ebf2c067336a59eacc2c6e9aae7a98a5c3b3604efbd639c00\": rpc error: code = NotFound desc = could not find container \"fe69496edc85905ebf2c067336a59eacc2c6e9aae7a98a5c3b3604efbd639c00\": container with ID starting with fe69496edc85905ebf2c067336a59eacc2c6e9aae7a98a5c3b3604efbd639c00 not found: ID does not exist" Jan 22 00:43:56 crc kubenswrapper[4829]: I0122 00:43:56.565528 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7fdb6b45-0fac-407c-a585-0bb220b91a02" path="/var/lib/kubelet/pods/7fdb6b45-0fac-407c-a585-0bb220b91a02/volumes" Jan 22 00:44:01 crc kubenswrapper[4829]: I0122 00:44:01.793348 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vdjzg" Jan 22 00:44:01 crc kubenswrapper[4829]: I0122 00:44:01.838995 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vdjzg" Jan 22 00:44:03 crc kubenswrapper[4829]: I0122 00:44:03.785663 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vdjzg"] Jan 22 00:44:03 crc kubenswrapper[4829]: I0122 00:44:03.786180 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vdjzg" podUID="36463693-e87e-4760-9d56-d5c389ac7f5e" containerName="registry-server" containerID="cri-o://c38b740209e92103e75b02061aeb05658f5b2cfe54812a384240dfcf1f3fcf99" gracePeriod=2 Jan 22 00:44:04 crc kubenswrapper[4829]: I0122 00:44:04.211739 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vdjzg" Jan 22 00:44:04 crc kubenswrapper[4829]: I0122 00:44:04.263184 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36463693-e87e-4760-9d56-d5c389ac7f5e-catalog-content\") pod \"36463693-e87e-4760-9d56-d5c389ac7f5e\" (UID: \"36463693-e87e-4760-9d56-d5c389ac7f5e\") " Jan 22 00:44:04 crc kubenswrapper[4829]: I0122 00:44:04.263355 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7knpf\" (UniqueName: \"kubernetes.io/projected/36463693-e87e-4760-9d56-d5c389ac7f5e-kube-api-access-7knpf\") pod \"36463693-e87e-4760-9d56-d5c389ac7f5e\" (UID: \"36463693-e87e-4760-9d56-d5c389ac7f5e\") " Jan 22 00:44:04 crc kubenswrapper[4829]: I0122 00:44:04.263391 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36463693-e87e-4760-9d56-d5c389ac7f5e-utilities\") pod \"36463693-e87e-4760-9d56-d5c389ac7f5e\" (UID: \"36463693-e87e-4760-9d56-d5c389ac7f5e\") " Jan 22 00:44:04 crc kubenswrapper[4829]: I0122 00:44:04.264300 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36463693-e87e-4760-9d56-d5c389ac7f5e-utilities" (OuterVolumeSpecName: "utilities") pod "36463693-e87e-4760-9d56-d5c389ac7f5e" (UID: "36463693-e87e-4760-9d56-d5c389ac7f5e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:44:04 crc kubenswrapper[4829]: I0122 00:44:04.273942 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36463693-e87e-4760-9d56-d5c389ac7f5e-kube-api-access-7knpf" (OuterVolumeSpecName: "kube-api-access-7knpf") pod "36463693-e87e-4760-9d56-d5c389ac7f5e" (UID: "36463693-e87e-4760-9d56-d5c389ac7f5e"). InnerVolumeSpecName "kube-api-access-7knpf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:44:04 crc kubenswrapper[4829]: I0122 00:44:04.364797 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7knpf\" (UniqueName: \"kubernetes.io/projected/36463693-e87e-4760-9d56-d5c389ac7f5e-kube-api-access-7knpf\") on node \"crc\" DevicePath \"\"" Jan 22 00:44:04 crc kubenswrapper[4829]: I0122 00:44:04.364833 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36463693-e87e-4760-9d56-d5c389ac7f5e-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 00:44:04 crc kubenswrapper[4829]: I0122 00:44:04.385771 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36463693-e87e-4760-9d56-d5c389ac7f5e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "36463693-e87e-4760-9d56-d5c389ac7f5e" (UID: "36463693-e87e-4760-9d56-d5c389ac7f5e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:44:04 crc kubenswrapper[4829]: I0122 00:44:04.466380 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36463693-e87e-4760-9d56-d5c389ac7f5e-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 00:44:04 crc kubenswrapper[4829]: I0122 00:44:04.658799 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:44:04 crc kubenswrapper[4829]: I0122 00:44:04.658884 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:44:04 crc kubenswrapper[4829]: I0122 00:44:04.723710 4829 generic.go:334] "Generic (PLEG): container finished" podID="36463693-e87e-4760-9d56-d5c389ac7f5e" containerID="c38b740209e92103e75b02061aeb05658f5b2cfe54812a384240dfcf1f3fcf99" exitCode=0 Jan 22 00:44:04 crc kubenswrapper[4829]: I0122 00:44:04.723781 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vdjzg" Jan 22 00:44:04 crc kubenswrapper[4829]: I0122 00:44:04.723838 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vdjzg" event={"ID":"36463693-e87e-4760-9d56-d5c389ac7f5e","Type":"ContainerDied","Data":"c38b740209e92103e75b02061aeb05658f5b2cfe54812a384240dfcf1f3fcf99"} Jan 22 00:44:04 crc kubenswrapper[4829]: I0122 00:44:04.724294 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vdjzg" event={"ID":"36463693-e87e-4760-9d56-d5c389ac7f5e","Type":"ContainerDied","Data":"c07e24302e43a51cbe1fc8068197eaf5e4879d1a00b7cfc3e1e192ca86153da6"} Jan 22 00:44:04 crc kubenswrapper[4829]: I0122 00:44:04.724336 4829 scope.go:117] "RemoveContainer" containerID="c38b740209e92103e75b02061aeb05658f5b2cfe54812a384240dfcf1f3fcf99" Jan 22 00:44:04 crc kubenswrapper[4829]: I0122 00:44:04.760398 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vdjzg"] Jan 22 00:44:04 crc kubenswrapper[4829]: I0122 00:44:04.763113 4829 scope.go:117] "RemoveContainer" containerID="39e1bf5b50210fad8136855eff651160782f5cf367caef8298556fa4c9d30c19" Jan 22 00:44:04 crc kubenswrapper[4829]: I0122 00:44:04.770067 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vdjzg"] Jan 22 00:44:04 crc kubenswrapper[4829]: I0122 00:44:04.798895 4829 scope.go:117] "RemoveContainer" containerID="9079aa80d91e45ea296078c40a16fd4043e21bc1b742606a3ec38e89d5c7df42" Jan 22 00:44:04 crc kubenswrapper[4829]: I0122 00:44:04.820246 4829 scope.go:117] "RemoveContainer" containerID="c38b740209e92103e75b02061aeb05658f5b2cfe54812a384240dfcf1f3fcf99" Jan 22 00:44:04 crc kubenswrapper[4829]: E0122 00:44:04.820875 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c38b740209e92103e75b02061aeb05658f5b2cfe54812a384240dfcf1f3fcf99\": container with ID starting with 
c38b740209e92103e75b02061aeb05658f5b2cfe54812a384240dfcf1f3fcf99 not found: ID does not exist" containerID="c38b740209e92103e75b02061aeb05658f5b2cfe54812a384240dfcf1f3fcf99" Jan 22 00:44:04 crc kubenswrapper[4829]: I0122 00:44:04.820958 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c38b740209e92103e75b02061aeb05658f5b2cfe54812a384240dfcf1f3fcf99"} err="failed to get container status \"c38b740209e92103e75b02061aeb05658f5b2cfe54812a384240dfcf1f3fcf99\": rpc error: code = NotFound desc = could not find container \"c38b740209e92103e75b02061aeb05658f5b2cfe54812a384240dfcf1f3fcf99\": container with ID starting with c38b740209e92103e75b02061aeb05658f5b2cfe54812a384240dfcf1f3fcf99 not found: ID does not exist" Jan 22 00:44:04 crc kubenswrapper[4829]: I0122 00:44:04.821022 4829 scope.go:117] "RemoveContainer" containerID="39e1bf5b50210fad8136855eff651160782f5cf367caef8298556fa4c9d30c19" Jan 22 00:44:04 crc kubenswrapper[4829]: E0122 00:44:04.821759 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39e1bf5b50210fad8136855eff651160782f5cf367caef8298556fa4c9d30c19\": container with ID starting with 39e1bf5b50210fad8136855eff651160782f5cf367caef8298556fa4c9d30c19 not found: ID does not exist" containerID="39e1bf5b50210fad8136855eff651160782f5cf367caef8298556fa4c9d30c19" Jan 22 00:44:04 crc kubenswrapper[4829]: I0122 00:44:04.821825 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39e1bf5b50210fad8136855eff651160782f5cf367caef8298556fa4c9d30c19"} err="failed to get container status \"39e1bf5b50210fad8136855eff651160782f5cf367caef8298556fa4c9d30c19\": rpc error: code = NotFound desc = could not find container \"39e1bf5b50210fad8136855eff651160782f5cf367caef8298556fa4c9d30c19\": container with ID starting with 39e1bf5b50210fad8136855eff651160782f5cf367caef8298556fa4c9d30c19 not found: ID does not exist" Jan 22 00:44:04 crc kubenswrapper[4829]: I0122 00:44:04.821869 4829 scope.go:117] "RemoveContainer" containerID="9079aa80d91e45ea296078c40a16fd4043e21bc1b742606a3ec38e89d5c7df42" Jan 22 00:44:04 crc kubenswrapper[4829]: E0122 00:44:04.822399 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9079aa80d91e45ea296078c40a16fd4043e21bc1b742606a3ec38e89d5c7df42\": container with ID starting with 9079aa80d91e45ea296078c40a16fd4043e21bc1b742606a3ec38e89d5c7df42 not found: ID does not exist" containerID="9079aa80d91e45ea296078c40a16fd4043e21bc1b742606a3ec38e89d5c7df42" Jan 22 00:44:04 crc kubenswrapper[4829]: I0122 00:44:04.822472 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9079aa80d91e45ea296078c40a16fd4043e21bc1b742606a3ec38e89d5c7df42"} err="failed to get container status \"9079aa80d91e45ea296078c40a16fd4043e21bc1b742606a3ec38e89d5c7df42\": rpc error: code = NotFound desc = could not find container \"9079aa80d91e45ea296078c40a16fd4043e21bc1b742606a3ec38e89d5c7df42\": container with ID starting with 9079aa80d91e45ea296078c40a16fd4043e21bc1b742606a3ec38e89d5c7df42 not found: ID does not exist" Jan 22 00:44:06 crc kubenswrapper[4829]: I0122 00:44:06.567996 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36463693-e87e-4760-9d56-d5c389ac7f5e" path="/var/lib/kubelet/pods/36463693-e87e-4760-9d56-d5c389ac7f5e/volumes" Jan 22 00:44:34 crc kubenswrapper[4829]: I0122 00:44:34.658408 
4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:44:34 crc kubenswrapper[4829]: I0122 00:44:34.659164 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:44:34 crc kubenswrapper[4829]: I0122 00:44:34.659238 4829 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 00:44:34 crc kubenswrapper[4829]: I0122 00:44:34.660436 4829 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f"} pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 00:44:34 crc kubenswrapper[4829]: I0122 00:44:34.660576 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" containerID="cri-o://41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" gracePeriod=600 Jan 22 00:44:34 crc kubenswrapper[4829]: E0122 00:44:34.791731 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:44:35 crc kubenswrapper[4829]: I0122 00:44:35.029139 4829 generic.go:334] "Generic (PLEG): container finished" podID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" exitCode=0 Jan 22 00:44:35 crc kubenswrapper[4829]: I0122 00:44:35.029180 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerDied","Data":"41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f"} Jan 22 00:44:35 crc kubenswrapper[4829]: I0122 00:44:35.029209 4829 scope.go:117] "RemoveContainer" containerID="9bb5f6aa7ec6668e87a354e30c1c293084ca01252e6b4dae22249ac4c707f10f" Jan 22 00:44:35 crc kubenswrapper[4829]: I0122 00:44:35.029681 4829 scope.go:117] "RemoveContainer" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" Jan 22 00:44:35 crc kubenswrapper[4829]: E0122 00:44:35.029887 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:44:46 crc kubenswrapper[4829]: I0122 00:44:46.553605 4829 scope.go:117] "RemoveContainer" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" Jan 22 00:44:46 crc kubenswrapper[4829]: E0122 00:44:46.554498 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.160965 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484045-qvd77"] Jan 22 00:45:00 crc kubenswrapper[4829]: E0122 00:45:00.165424 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36463693-e87e-4760-9d56-d5c389ac7f5e" containerName="extract-utilities" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.165462 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="36463693-e87e-4760-9d56-d5c389ac7f5e" containerName="extract-utilities" Jan 22 00:45:00 crc kubenswrapper[4829]: E0122 00:45:00.165497 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c7a996d-6203-417f-a847-ba550650f6fb" containerName="extract-content" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.165514 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c7a996d-6203-417f-a847-ba550650f6fb" containerName="extract-content" Jan 22 00:45:00 crc kubenswrapper[4829]: E0122 00:45:00.166474 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36463693-e87e-4760-9d56-d5c389ac7f5e" containerName="registry-server" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.166571 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="36463693-e87e-4760-9d56-d5c389ac7f5e" containerName="registry-server" Jan 22 00:45:00 crc kubenswrapper[4829]: E0122 00:45:00.166607 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fdb6b45-0fac-407c-a585-0bb220b91a02" containerName="extract-content" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.166624 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fdb6b45-0fac-407c-a585-0bb220b91a02" containerName="extract-content" Jan 22 00:45:00 crc kubenswrapper[4829]: E0122 00:45:00.166646 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c7a996d-6203-417f-a847-ba550650f6fb" containerName="registry-server" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.166662 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c7a996d-6203-417f-a847-ba550650f6fb" containerName="registry-server" Jan 22 00:45:00 crc kubenswrapper[4829]: E0122 00:45:00.166691 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c7a996d-6203-417f-a847-ba550650f6fb" containerName="extract-utilities" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.166707 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c7a996d-6203-417f-a847-ba550650f6fb" containerName="extract-utilities" Jan 22 00:45:00 crc kubenswrapper[4829]: E0122 00:45:00.166728 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fdb6b45-0fac-407c-a585-0bb220b91a02" 
containerName="extract-utilities" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.166744 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fdb6b45-0fac-407c-a585-0bb220b91a02" containerName="extract-utilities" Jan 22 00:45:00 crc kubenswrapper[4829]: E0122 00:45:00.166818 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36463693-e87e-4760-9d56-d5c389ac7f5e" containerName="extract-content" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.166837 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="36463693-e87e-4760-9d56-d5c389ac7f5e" containerName="extract-content" Jan 22 00:45:00 crc kubenswrapper[4829]: E0122 00:45:00.166870 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fdb6b45-0fac-407c-a585-0bb220b91a02" containerName="registry-server" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.166885 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fdb6b45-0fac-407c-a585-0bb220b91a02" containerName="registry-server" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.167171 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fdb6b45-0fac-407c-a585-0bb220b91a02" containerName="registry-server" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.167213 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c7a996d-6203-417f-a847-ba550650f6fb" containerName="registry-server" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.167271 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="36463693-e87e-4760-9d56-d5c389ac7f5e" containerName="registry-server" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.168359 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484045-qvd77" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.171641 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.172100 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.188432 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484045-qvd77"] Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.302560 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6xb9\" (UniqueName: \"kubernetes.io/projected/4367babb-2f0e-4f12-b3e1-8269c06490ce-kube-api-access-z6xb9\") pod \"collect-profiles-29484045-qvd77\" (UID: \"4367babb-2f0e-4f12-b3e1-8269c06490ce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484045-qvd77" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.302641 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4367babb-2f0e-4f12-b3e1-8269c06490ce-config-volume\") pod \"collect-profiles-29484045-qvd77\" (UID: \"4367babb-2f0e-4f12-b3e1-8269c06490ce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484045-qvd77" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.302703 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: 
\"kubernetes.io/secret/4367babb-2f0e-4f12-b3e1-8269c06490ce-secret-volume\") pod \"collect-profiles-29484045-qvd77\" (UID: \"4367babb-2f0e-4f12-b3e1-8269c06490ce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484045-qvd77" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.404162 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6xb9\" (UniqueName: \"kubernetes.io/projected/4367babb-2f0e-4f12-b3e1-8269c06490ce-kube-api-access-z6xb9\") pod \"collect-profiles-29484045-qvd77\" (UID: \"4367babb-2f0e-4f12-b3e1-8269c06490ce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484045-qvd77" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.404573 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4367babb-2f0e-4f12-b3e1-8269c06490ce-config-volume\") pod \"collect-profiles-29484045-qvd77\" (UID: \"4367babb-2f0e-4f12-b3e1-8269c06490ce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484045-qvd77" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.404735 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4367babb-2f0e-4f12-b3e1-8269c06490ce-secret-volume\") pod \"collect-profiles-29484045-qvd77\" (UID: \"4367babb-2f0e-4f12-b3e1-8269c06490ce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484045-qvd77" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.405558 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4367babb-2f0e-4f12-b3e1-8269c06490ce-config-volume\") pod \"collect-profiles-29484045-qvd77\" (UID: \"4367babb-2f0e-4f12-b3e1-8269c06490ce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484045-qvd77" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.417290 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4367babb-2f0e-4f12-b3e1-8269c06490ce-secret-volume\") pod \"collect-profiles-29484045-qvd77\" (UID: \"4367babb-2f0e-4f12-b3e1-8269c06490ce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484045-qvd77" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.433292 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6xb9\" (UniqueName: \"kubernetes.io/projected/4367babb-2f0e-4f12-b3e1-8269c06490ce-kube-api-access-z6xb9\") pod \"collect-profiles-29484045-qvd77\" (UID: \"4367babb-2f0e-4f12-b3e1-8269c06490ce\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484045-qvd77" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.499789 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484045-qvd77" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.554129 4829 scope.go:117] "RemoveContainer" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" Jan 22 00:45:00 crc kubenswrapper[4829]: E0122 00:45:00.554468 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:45:00 crc kubenswrapper[4829]: I0122 00:45:00.742064 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484045-qvd77"] Jan 22 00:45:01 crc kubenswrapper[4829]: I0122 00:45:01.265076 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484045-qvd77" event={"ID":"4367babb-2f0e-4f12-b3e1-8269c06490ce","Type":"ContainerStarted","Data":"0367847067fbed6110f0a91788a5ce2aa754c6cbcfff51066e51d4ff072d7326"} Jan 22 00:45:01 crc kubenswrapper[4829]: I0122 00:45:01.265127 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484045-qvd77" event={"ID":"4367babb-2f0e-4f12-b3e1-8269c06490ce","Type":"ContainerStarted","Data":"69615aace0ee44df97aaed077dfd786dfe4a706fb9bd027ee230e5cd632f90c6"} Jan 22 00:45:02 crc kubenswrapper[4829]: I0122 00:45:02.278032 4829 generic.go:334] "Generic (PLEG): container finished" podID="4367babb-2f0e-4f12-b3e1-8269c06490ce" containerID="0367847067fbed6110f0a91788a5ce2aa754c6cbcfff51066e51d4ff072d7326" exitCode=0 Jan 22 00:45:02 crc kubenswrapper[4829]: I0122 00:45:02.278326 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484045-qvd77" event={"ID":"4367babb-2f0e-4f12-b3e1-8269c06490ce","Type":"ContainerDied","Data":"0367847067fbed6110f0a91788a5ce2aa754c6cbcfff51066e51d4ff072d7326"} Jan 22 00:45:03 crc kubenswrapper[4829]: I0122 00:45:03.679816 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484045-qvd77" Jan 22 00:45:03 crc kubenswrapper[4829]: I0122 00:45:03.760745 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4367babb-2f0e-4f12-b3e1-8269c06490ce-config-volume\") pod \"4367babb-2f0e-4f12-b3e1-8269c06490ce\" (UID: \"4367babb-2f0e-4f12-b3e1-8269c06490ce\") " Jan 22 00:45:03 crc kubenswrapper[4829]: I0122 00:45:03.760986 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6xb9\" (UniqueName: \"kubernetes.io/projected/4367babb-2f0e-4f12-b3e1-8269c06490ce-kube-api-access-z6xb9\") pod \"4367babb-2f0e-4f12-b3e1-8269c06490ce\" (UID: \"4367babb-2f0e-4f12-b3e1-8269c06490ce\") " Jan 22 00:45:03 crc kubenswrapper[4829]: I0122 00:45:03.761069 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4367babb-2f0e-4f12-b3e1-8269c06490ce-secret-volume\") pod \"4367babb-2f0e-4f12-b3e1-8269c06490ce\" (UID: \"4367babb-2f0e-4f12-b3e1-8269c06490ce\") " Jan 22 00:45:03 crc kubenswrapper[4829]: I0122 00:45:03.761308 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4367babb-2f0e-4f12-b3e1-8269c06490ce-config-volume" (OuterVolumeSpecName: "config-volume") pod "4367babb-2f0e-4f12-b3e1-8269c06490ce" (UID: "4367babb-2f0e-4f12-b3e1-8269c06490ce"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 00:45:03 crc kubenswrapper[4829]: I0122 00:45:03.761590 4829 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4367babb-2f0e-4f12-b3e1-8269c06490ce-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 00:45:03 crc kubenswrapper[4829]: I0122 00:45:03.772869 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4367babb-2f0e-4f12-b3e1-8269c06490ce-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4367babb-2f0e-4f12-b3e1-8269c06490ce" (UID: "4367babb-2f0e-4f12-b3e1-8269c06490ce"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 00:45:03 crc kubenswrapper[4829]: I0122 00:45:03.772957 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4367babb-2f0e-4f12-b3e1-8269c06490ce-kube-api-access-z6xb9" (OuterVolumeSpecName: "kube-api-access-z6xb9") pod "4367babb-2f0e-4f12-b3e1-8269c06490ce" (UID: "4367babb-2f0e-4f12-b3e1-8269c06490ce"). InnerVolumeSpecName "kube-api-access-z6xb9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:45:03 crc kubenswrapper[4829]: I0122 00:45:03.863000 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6xb9\" (UniqueName: \"kubernetes.io/projected/4367babb-2f0e-4f12-b3e1-8269c06490ce-kube-api-access-z6xb9\") on node \"crc\" DevicePath \"\"" Jan 22 00:45:03 crc kubenswrapper[4829]: I0122 00:45:03.863048 4829 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4367babb-2f0e-4f12-b3e1-8269c06490ce-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 00:45:04 crc kubenswrapper[4829]: I0122 00:45:04.293196 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484045-qvd77" event={"ID":"4367babb-2f0e-4f12-b3e1-8269c06490ce","Type":"ContainerDied","Data":"69615aace0ee44df97aaed077dfd786dfe4a706fb9bd027ee230e5cd632f90c6"} Jan 22 00:45:04 crc kubenswrapper[4829]: I0122 00:45:04.293239 4829 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="69615aace0ee44df97aaed077dfd786dfe4a706fb9bd027ee230e5cd632f90c6" Jan 22 00:45:04 crc kubenswrapper[4829]: I0122 00:45:04.293254 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484045-qvd77" Jan 22 00:45:04 crc kubenswrapper[4829]: I0122 00:45:04.345262 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484000-n7tmf"] Jan 22 00:45:04 crc kubenswrapper[4829]: I0122 00:45:04.353160 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484000-n7tmf"] Jan 22 00:45:04 crc kubenswrapper[4829]: I0122 00:45:04.564341 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c91fb8a4-d4a3-4782-94f2-3d6eb06221d9" path="/var/lib/kubelet/pods/c91fb8a4-d4a3-4782-94f2-3d6eb06221d9/volumes" Jan 22 00:45:11 crc kubenswrapper[4829]: I0122 00:45:11.553925 4829 scope.go:117] "RemoveContainer" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" Jan 22 00:45:11 crc kubenswrapper[4829]: E0122 00:45:11.554713 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:45:22 crc kubenswrapper[4829]: I0122 00:45:22.557037 4829 scope.go:117] "RemoveContainer" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" Jan 22 00:45:22 crc kubenswrapper[4829]: E0122 00:45:22.557680 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:45:33 crc kubenswrapper[4829]: I0122 00:45:33.554264 4829 scope.go:117] "RemoveContainer" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" Jan 22 
00:45:33 crc kubenswrapper[4829]: E0122 00:45:33.555087 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:45:45 crc kubenswrapper[4829]: I0122 00:45:45.553954 4829 scope.go:117] "RemoveContainer" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" Jan 22 00:45:45 crc kubenswrapper[4829]: E0122 00:45:45.554820 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:45:56 crc kubenswrapper[4829]: I0122 00:45:56.553066 4829 scope.go:117] "RemoveContainer" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" Jan 22 00:45:56 crc kubenswrapper[4829]: E0122 00:45:56.554817 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:46:03 crc kubenswrapper[4829]: I0122 00:46:03.587133 4829 scope.go:117] "RemoveContainer" containerID="945e7215b35c6a762902d687d8410eb7be115fac49c7fdd0e3ad36070e81f81b" Jan 22 00:46:11 crc kubenswrapper[4829]: I0122 00:46:11.553616 4829 scope.go:117] "RemoveContainer" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" Jan 22 00:46:11 crc kubenswrapper[4829]: E0122 00:46:11.554589 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:46:22 crc kubenswrapper[4829]: I0122 00:46:22.567521 4829 scope.go:117] "RemoveContainer" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" Jan 22 00:46:22 crc kubenswrapper[4829]: E0122 00:46:22.568075 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:46:33 crc kubenswrapper[4829]: I0122 00:46:33.553407 4829 scope.go:117] "RemoveContainer" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" Jan 22 00:46:33 crc 
kubenswrapper[4829]: E0122 00:46:33.554140 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:46:47 crc kubenswrapper[4829]: I0122 00:46:47.553589 4829 scope.go:117] "RemoveContainer" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" Jan 22 00:46:47 crc kubenswrapper[4829]: E0122 00:46:47.554343 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:47:02 crc kubenswrapper[4829]: I0122 00:47:02.561187 4829 scope.go:117] "RemoveContainer" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" Jan 22 00:47:02 crc kubenswrapper[4829]: E0122 00:47:02.561995 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:47:14 crc kubenswrapper[4829]: I0122 00:47:14.554352 4829 scope.go:117] "RemoveContainer" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" Jan 22 00:47:14 crc kubenswrapper[4829]: E0122 00:47:14.555402 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:47:28 crc kubenswrapper[4829]: I0122 00:47:28.554590 4829 scope.go:117] "RemoveContainer" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" Jan 22 00:47:28 crc kubenswrapper[4829]: E0122 00:47:28.555596 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:47:42 crc kubenswrapper[4829]: I0122 00:47:42.559872 4829 scope.go:117] "RemoveContainer" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" Jan 22 00:47:42 crc kubenswrapper[4829]: E0122 00:47:42.560755 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:47:53 crc kubenswrapper[4829]: I0122 00:47:53.553608 4829 scope.go:117] "RemoveContainer" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" Jan 22 00:47:53 crc kubenswrapper[4829]: E0122 00:47:53.554305 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:48:08 crc kubenswrapper[4829]: I0122 00:48:08.559688 4829 scope.go:117] "RemoveContainer" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" Jan 22 00:48:08 crc kubenswrapper[4829]: E0122 00:48:08.560871 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:48:21 crc kubenswrapper[4829]: I0122 00:48:21.553641 4829 scope.go:117] "RemoveContainer" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" Jan 22 00:48:21 crc kubenswrapper[4829]: E0122 00:48:21.555387 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:48:32 crc kubenswrapper[4829]: I0122 00:48:32.566173 4829 scope.go:117] "RemoveContainer" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" Jan 22 00:48:32 crc kubenswrapper[4829]: E0122 00:48:32.567592 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:48:44 crc kubenswrapper[4829]: I0122 00:48:44.554804 4829 scope.go:117] "RemoveContainer" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" Jan 22 00:48:44 crc kubenswrapper[4829]: E0122 00:48:44.555672 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:48:57 crc kubenswrapper[4829]: I0122 00:48:57.554929 4829 scope.go:117] "RemoveContainer" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" Jan 22 00:48:57 crc kubenswrapper[4829]: E0122 00:48:57.555888 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:49:09 crc kubenswrapper[4829]: I0122 00:49:09.553766 4829 scope.go:117] "RemoveContainer" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" Jan 22 00:49:09 crc kubenswrapper[4829]: E0122 00:49:09.554699 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:49:23 crc kubenswrapper[4829]: I0122 00:49:23.553660 4829 scope.go:117] "RemoveContainer" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" Jan 22 00:49:23 crc kubenswrapper[4829]: E0122 00:49:23.554411 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:49:34 crc kubenswrapper[4829]: I0122 00:49:34.554223 4829 scope.go:117] "RemoveContainer" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" Jan 22 00:49:34 crc kubenswrapper[4829]: E0122 00:49:34.555000 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:49:49 crc kubenswrapper[4829]: I0122 00:49:49.553332 4829 scope.go:117] "RemoveContainer" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" Jan 22 00:49:50 crc kubenswrapper[4829]: I0122 00:49:50.346401 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerStarted","Data":"15982f19cf72331ab8bf2e1cf76da92034f496939a60c8859d8942ae9cdedd3d"} Jan 22 00:52:04 crc kubenswrapper[4829]: I0122 00:52:04.658378 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe 
status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:52:04 crc kubenswrapper[4829]: I0122 00:52:04.661072 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:52:34 crc kubenswrapper[4829]: I0122 00:52:34.658623 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:52:34 crc kubenswrapper[4829]: I0122 00:52:34.659200 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:53:04 crc kubenswrapper[4829]: I0122 00:53:04.658164 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:53:04 crc kubenswrapper[4829]: I0122 00:53:04.658696 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:53:04 crc kubenswrapper[4829]: I0122 00:53:04.658748 4829 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 00:53:04 crc kubenswrapper[4829]: I0122 00:53:04.659329 4829 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"15982f19cf72331ab8bf2e1cf76da92034f496939a60c8859d8942ae9cdedd3d"} pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 00:53:04 crc kubenswrapper[4829]: I0122 00:53:04.659374 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" containerID="cri-o://15982f19cf72331ab8bf2e1cf76da92034f496939a60c8859d8942ae9cdedd3d" gracePeriod=600 Jan 22 00:53:05 crc kubenswrapper[4829]: I0122 00:53:05.171391 4829 generic.go:334] "Generic (PLEG): container finished" podID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerID="15982f19cf72331ab8bf2e1cf76da92034f496939a60c8859d8942ae9cdedd3d" exitCode=0 Jan 22 00:53:05 crc kubenswrapper[4829]: I0122 00:53:05.171459 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" 
event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerDied","Data":"15982f19cf72331ab8bf2e1cf76da92034f496939a60c8859d8942ae9cdedd3d"} Jan 22 00:53:05 crc kubenswrapper[4829]: I0122 00:53:05.171722 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerStarted","Data":"6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f"} Jan 22 00:53:05 crc kubenswrapper[4829]: I0122 00:53:05.171744 4829 scope.go:117] "RemoveContainer" containerID="41079ec4c896ca69d873bb5d20535371b3691903a216729cbffe6beefcb7955f" Jan 22 00:53:44 crc kubenswrapper[4829]: I0122 00:53:44.597326 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6w9n4"] Jan 22 00:53:44 crc kubenswrapper[4829]: E0122 00:53:44.598173 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4367babb-2f0e-4f12-b3e1-8269c06490ce" containerName="collect-profiles" Jan 22 00:53:44 crc kubenswrapper[4829]: I0122 00:53:44.598191 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="4367babb-2f0e-4f12-b3e1-8269c06490ce" containerName="collect-profiles" Jan 22 00:53:44 crc kubenswrapper[4829]: I0122 00:53:44.598348 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="4367babb-2f0e-4f12-b3e1-8269c06490ce" containerName="collect-profiles" Jan 22 00:53:44 crc kubenswrapper[4829]: I0122 00:53:44.599392 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6w9n4" Jan 22 00:53:44 crc kubenswrapper[4829]: I0122 00:53:44.609448 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54fd8f0c-1136-4ac5-8cb3-3570a45b7790-utilities\") pod \"redhat-operators-6w9n4\" (UID: \"54fd8f0c-1136-4ac5-8cb3-3570a45b7790\") " pod="openshift-marketplace/redhat-operators-6w9n4" Jan 22 00:53:44 crc kubenswrapper[4829]: I0122 00:53:44.609602 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54fd8f0c-1136-4ac5-8cb3-3570a45b7790-catalog-content\") pod \"redhat-operators-6w9n4\" (UID: \"54fd8f0c-1136-4ac5-8cb3-3570a45b7790\") " pod="openshift-marketplace/redhat-operators-6w9n4" Jan 22 00:53:44 crc kubenswrapper[4829]: I0122 00:53:44.609681 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nw4r2\" (UniqueName: \"kubernetes.io/projected/54fd8f0c-1136-4ac5-8cb3-3570a45b7790-kube-api-access-nw4r2\") pod \"redhat-operators-6w9n4\" (UID: \"54fd8f0c-1136-4ac5-8cb3-3570a45b7790\") " pod="openshift-marketplace/redhat-operators-6w9n4" Jan 22 00:53:44 crc kubenswrapper[4829]: I0122 00:53:44.609841 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6w9n4"] Jan 22 00:53:44 crc kubenswrapper[4829]: I0122 00:53:44.711739 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54fd8f0c-1136-4ac5-8cb3-3570a45b7790-utilities\") pod \"redhat-operators-6w9n4\" (UID: \"54fd8f0c-1136-4ac5-8cb3-3570a45b7790\") " pod="openshift-marketplace/redhat-operators-6w9n4" Jan 22 00:53:44 crc kubenswrapper[4829]: I0122 00:53:44.711865 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54fd8f0c-1136-4ac5-8cb3-3570a45b7790-catalog-content\") pod \"redhat-operators-6w9n4\" (UID: \"54fd8f0c-1136-4ac5-8cb3-3570a45b7790\") " pod="openshift-marketplace/redhat-operators-6w9n4" Jan 22 00:53:44 crc kubenswrapper[4829]: I0122 00:53:44.711903 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nw4r2\" (UniqueName: \"kubernetes.io/projected/54fd8f0c-1136-4ac5-8cb3-3570a45b7790-kube-api-access-nw4r2\") pod \"redhat-operators-6w9n4\" (UID: \"54fd8f0c-1136-4ac5-8cb3-3570a45b7790\") " pod="openshift-marketplace/redhat-operators-6w9n4" Jan 22 00:53:44 crc kubenswrapper[4829]: I0122 00:53:44.712421 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54fd8f0c-1136-4ac5-8cb3-3570a45b7790-utilities\") pod \"redhat-operators-6w9n4\" (UID: \"54fd8f0c-1136-4ac5-8cb3-3570a45b7790\") " pod="openshift-marketplace/redhat-operators-6w9n4" Jan 22 00:53:44 crc kubenswrapper[4829]: I0122 00:53:44.712428 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54fd8f0c-1136-4ac5-8cb3-3570a45b7790-catalog-content\") pod \"redhat-operators-6w9n4\" (UID: \"54fd8f0c-1136-4ac5-8cb3-3570a45b7790\") " pod="openshift-marketplace/redhat-operators-6w9n4" Jan 22 00:53:44 crc kubenswrapper[4829]: I0122 00:53:44.734342 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nw4r2\" (UniqueName: \"kubernetes.io/projected/54fd8f0c-1136-4ac5-8cb3-3570a45b7790-kube-api-access-nw4r2\") pod \"redhat-operators-6w9n4\" (UID: \"54fd8f0c-1136-4ac5-8cb3-3570a45b7790\") " pod="openshift-marketplace/redhat-operators-6w9n4" Jan 22 00:53:44 crc kubenswrapper[4829]: I0122 00:53:44.922606 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6w9n4" Jan 22 00:53:45 crc kubenswrapper[4829]: I0122 00:53:45.203424 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6w9n4"] Jan 22 00:53:45 crc kubenswrapper[4829]: I0122 00:53:45.560836 4829 generic.go:334] "Generic (PLEG): container finished" podID="54fd8f0c-1136-4ac5-8cb3-3570a45b7790" containerID="0ac6a83206520064834e0bff29463989b3e98c8cdf0fd1d0270980a3b8ced778" exitCode=0 Jan 22 00:53:45 crc kubenswrapper[4829]: I0122 00:53:45.560904 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6w9n4" event={"ID":"54fd8f0c-1136-4ac5-8cb3-3570a45b7790","Type":"ContainerDied","Data":"0ac6a83206520064834e0bff29463989b3e98c8cdf0fd1d0270980a3b8ced778"} Jan 22 00:53:45 crc kubenswrapper[4829]: I0122 00:53:45.561060 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6w9n4" event={"ID":"54fd8f0c-1136-4ac5-8cb3-3570a45b7790","Type":"ContainerStarted","Data":"58743c0ed82dcb73eddcd12470735acf47e3b2546cfc4131174b36aac8fb3782"} Jan 22 00:53:45 crc kubenswrapper[4829]: I0122 00:53:45.562757 4829 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 00:53:46 crc kubenswrapper[4829]: I0122 00:53:46.577515 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6w9n4" event={"ID":"54fd8f0c-1136-4ac5-8cb3-3570a45b7790","Type":"ContainerStarted","Data":"74e3db6d12d4568ff608ada68c3fd52dd36bc5f575efc5dc92f1ba06a90dc76d"} Jan 22 00:53:47 crc kubenswrapper[4829]: I0122 00:53:47.589309 4829 generic.go:334] "Generic (PLEG): container finished" podID="54fd8f0c-1136-4ac5-8cb3-3570a45b7790" containerID="74e3db6d12d4568ff608ada68c3fd52dd36bc5f575efc5dc92f1ba06a90dc76d" exitCode=0 Jan 22 00:53:47 crc kubenswrapper[4829]: I0122 00:53:47.589364 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6w9n4" event={"ID":"54fd8f0c-1136-4ac5-8cb3-3570a45b7790","Type":"ContainerDied","Data":"74e3db6d12d4568ff608ada68c3fd52dd36bc5f575efc5dc92f1ba06a90dc76d"} Jan 22 00:53:48 crc kubenswrapper[4829]: I0122 00:53:48.601042 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6w9n4" event={"ID":"54fd8f0c-1136-4ac5-8cb3-3570a45b7790","Type":"ContainerStarted","Data":"460bb248b3df73cf11c3f0b675fc194507a13430112e151440c743d2985174be"} Jan 22 00:53:48 crc kubenswrapper[4829]: I0122 00:53:48.638758 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6w9n4" podStartSLOduration=2.220573492 podStartE2EDuration="4.638728819s" podCreationTimestamp="2026-01-22 00:53:44 +0000 UTC" firstStartedPulling="2026-01-22 00:53:45.562465484 +0000 UTC m=+2803.598707396" lastFinishedPulling="2026-01-22 00:53:47.980620791 +0000 UTC m=+2806.016862723" observedRunningTime="2026-01-22 00:53:48.631806265 +0000 UTC m=+2806.668048257" watchObservedRunningTime="2026-01-22 00:53:48.638728819 +0000 UTC m=+2806.674970781" Jan 22 00:53:54 crc kubenswrapper[4829]: I0122 00:53:54.923044 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6w9n4" Jan 22 00:53:54 crc kubenswrapper[4829]: I0122 00:53:54.923360 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6w9n4" Jan 22 00:53:55 crc 
kubenswrapper[4829]: I0122 00:53:55.993758 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6w9n4" podUID="54fd8f0c-1136-4ac5-8cb3-3570a45b7790" containerName="registry-server" probeResult="failure" output=< Jan 22 00:53:55 crc kubenswrapper[4829]: timeout: failed to connect service ":50051" within 1s Jan 22 00:53:55 crc kubenswrapper[4829]: > Jan 22 00:54:04 crc kubenswrapper[4829]: I0122 00:54:04.963767 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6w9n4" Jan 22 00:54:05 crc kubenswrapper[4829]: I0122 00:54:05.025168 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6w9n4" Jan 22 00:54:05 crc kubenswrapper[4829]: I0122 00:54:05.207278 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6w9n4"] Jan 22 00:54:06 crc kubenswrapper[4829]: I0122 00:54:06.775324 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6w9n4" podUID="54fd8f0c-1136-4ac5-8cb3-3570a45b7790" containerName="registry-server" containerID="cri-o://460bb248b3df73cf11c3f0b675fc194507a13430112e151440c743d2985174be" gracePeriod=2 Jan 22 00:54:07 crc kubenswrapper[4829]: I0122 00:54:07.792941 4829 generic.go:334] "Generic (PLEG): container finished" podID="54fd8f0c-1136-4ac5-8cb3-3570a45b7790" containerID="460bb248b3df73cf11c3f0b675fc194507a13430112e151440c743d2985174be" exitCode=0 Jan 22 00:54:07 crc kubenswrapper[4829]: I0122 00:54:07.793023 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6w9n4" event={"ID":"54fd8f0c-1136-4ac5-8cb3-3570a45b7790","Type":"ContainerDied","Data":"460bb248b3df73cf11c3f0b675fc194507a13430112e151440c743d2985174be"} Jan 22 00:54:08 crc kubenswrapper[4829]: I0122 00:54:08.345263 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6w9n4" Jan 22 00:54:08 crc kubenswrapper[4829]: I0122 00:54:08.406950 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54fd8f0c-1136-4ac5-8cb3-3570a45b7790-utilities\") pod \"54fd8f0c-1136-4ac5-8cb3-3570a45b7790\" (UID: \"54fd8f0c-1136-4ac5-8cb3-3570a45b7790\") " Jan 22 00:54:08 crc kubenswrapper[4829]: I0122 00:54:08.407124 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54fd8f0c-1136-4ac5-8cb3-3570a45b7790-catalog-content\") pod \"54fd8f0c-1136-4ac5-8cb3-3570a45b7790\" (UID: \"54fd8f0c-1136-4ac5-8cb3-3570a45b7790\") " Jan 22 00:54:08 crc kubenswrapper[4829]: I0122 00:54:08.407171 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nw4r2\" (UniqueName: \"kubernetes.io/projected/54fd8f0c-1136-4ac5-8cb3-3570a45b7790-kube-api-access-nw4r2\") pod \"54fd8f0c-1136-4ac5-8cb3-3570a45b7790\" (UID: \"54fd8f0c-1136-4ac5-8cb3-3570a45b7790\") " Jan 22 00:54:08 crc kubenswrapper[4829]: I0122 00:54:08.408211 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54fd8f0c-1136-4ac5-8cb3-3570a45b7790-utilities" (OuterVolumeSpecName: "utilities") pod "54fd8f0c-1136-4ac5-8cb3-3570a45b7790" (UID: "54fd8f0c-1136-4ac5-8cb3-3570a45b7790"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:54:08 crc kubenswrapper[4829]: I0122 00:54:08.413402 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54fd8f0c-1136-4ac5-8cb3-3570a45b7790-kube-api-access-nw4r2" (OuterVolumeSpecName: "kube-api-access-nw4r2") pod "54fd8f0c-1136-4ac5-8cb3-3570a45b7790" (UID: "54fd8f0c-1136-4ac5-8cb3-3570a45b7790"). InnerVolumeSpecName "kube-api-access-nw4r2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:54:08 crc kubenswrapper[4829]: I0122 00:54:08.509388 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54fd8f0c-1136-4ac5-8cb3-3570a45b7790-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 00:54:08 crc kubenswrapper[4829]: I0122 00:54:08.509441 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nw4r2\" (UniqueName: \"kubernetes.io/projected/54fd8f0c-1136-4ac5-8cb3-3570a45b7790-kube-api-access-nw4r2\") on node \"crc\" DevicePath \"\"" Jan 22 00:54:08 crc kubenswrapper[4829]: I0122 00:54:08.522521 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54fd8f0c-1136-4ac5-8cb3-3570a45b7790-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "54fd8f0c-1136-4ac5-8cb3-3570a45b7790" (UID: "54fd8f0c-1136-4ac5-8cb3-3570a45b7790"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:54:08 crc kubenswrapper[4829]: I0122 00:54:08.610834 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54fd8f0c-1136-4ac5-8cb3-3570a45b7790-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 00:54:08 crc kubenswrapper[4829]: I0122 00:54:08.836064 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6w9n4" event={"ID":"54fd8f0c-1136-4ac5-8cb3-3570a45b7790","Type":"ContainerDied","Data":"58743c0ed82dcb73eddcd12470735acf47e3b2546cfc4131174b36aac8fb3782"} Jan 22 00:54:08 crc kubenswrapper[4829]: I0122 00:54:08.836169 4829 scope.go:117] "RemoveContainer" containerID="460bb248b3df73cf11c3f0b675fc194507a13430112e151440c743d2985174be" Jan 22 00:54:08 crc kubenswrapper[4829]: I0122 00:54:08.836187 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6w9n4" Jan 22 00:54:08 crc kubenswrapper[4829]: I0122 00:54:08.879852 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6w9n4"] Jan 22 00:54:08 crc kubenswrapper[4829]: I0122 00:54:08.885706 4829 scope.go:117] "RemoveContainer" containerID="74e3db6d12d4568ff608ada68c3fd52dd36bc5f575efc5dc92f1ba06a90dc76d" Jan 22 00:54:08 crc kubenswrapper[4829]: I0122 00:54:08.889026 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6w9n4"] Jan 22 00:54:08 crc kubenswrapper[4829]: I0122 00:54:08.928630 4829 scope.go:117] "RemoveContainer" containerID="0ac6a83206520064834e0bff29463989b3e98c8cdf0fd1d0270980a3b8ced778" Jan 22 00:54:10 crc kubenswrapper[4829]: I0122 00:54:10.569305 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54fd8f0c-1136-4ac5-8cb3-3570a45b7790" path="/var/lib/kubelet/pods/54fd8f0c-1136-4ac5-8cb3-3570a45b7790/volumes" Jan 22 00:54:12 crc kubenswrapper[4829]: I0122 00:54:12.433610 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4j5fs"] Jan 22 00:54:12 crc kubenswrapper[4829]: E0122 00:54:12.434521 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54fd8f0c-1136-4ac5-8cb3-3570a45b7790" containerName="extract-utilities" Jan 22 00:54:12 crc kubenswrapper[4829]: I0122 00:54:12.434567 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="54fd8f0c-1136-4ac5-8cb3-3570a45b7790" containerName="extract-utilities" Jan 22 00:54:12 crc kubenswrapper[4829]: E0122 00:54:12.434602 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54fd8f0c-1136-4ac5-8cb3-3570a45b7790" containerName="extract-content" Jan 22 00:54:12 crc kubenswrapper[4829]: I0122 00:54:12.434615 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="54fd8f0c-1136-4ac5-8cb3-3570a45b7790" containerName="extract-content" Jan 22 00:54:12 crc kubenswrapper[4829]: E0122 00:54:12.434634 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54fd8f0c-1136-4ac5-8cb3-3570a45b7790" containerName="registry-server" Jan 22 00:54:12 crc kubenswrapper[4829]: I0122 00:54:12.434648 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="54fd8f0c-1136-4ac5-8cb3-3570a45b7790" containerName="registry-server" Jan 22 00:54:12 crc kubenswrapper[4829]: I0122 00:54:12.434887 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="54fd8f0c-1136-4ac5-8cb3-3570a45b7790" containerName="registry-server" Jan 22 00:54:12 crc kubenswrapper[4829]: I0122 00:54:12.437803 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4j5fs" Jan 22 00:54:12 crc kubenswrapper[4829]: I0122 00:54:12.467330 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4j5fs"] Jan 22 00:54:12 crc kubenswrapper[4829]: I0122 00:54:12.583408 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58-catalog-content\") pod \"community-operators-4j5fs\" (UID: \"c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58\") " pod="openshift-marketplace/community-operators-4j5fs" Jan 22 00:54:12 crc kubenswrapper[4829]: I0122 00:54:12.583471 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsrfh\" (UniqueName: \"kubernetes.io/projected/c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58-kube-api-access-vsrfh\") pod \"community-operators-4j5fs\" (UID: \"c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58\") " pod="openshift-marketplace/community-operators-4j5fs" Jan 22 00:54:12 crc kubenswrapper[4829]: I0122 00:54:12.583700 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58-utilities\") pod \"community-operators-4j5fs\" (UID: \"c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58\") " pod="openshift-marketplace/community-operators-4j5fs" Jan 22 00:54:12 crc kubenswrapper[4829]: I0122 00:54:12.685499 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58-utilities\") pod \"community-operators-4j5fs\" (UID: \"c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58\") " pod="openshift-marketplace/community-operators-4j5fs" Jan 22 00:54:12 crc kubenswrapper[4829]: I0122 00:54:12.685826 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58-catalog-content\") pod \"community-operators-4j5fs\" (UID: \"c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58\") " pod="openshift-marketplace/community-operators-4j5fs" Jan 22 00:54:12 crc kubenswrapper[4829]: I0122 00:54:12.685923 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsrfh\" (UniqueName: \"kubernetes.io/projected/c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58-kube-api-access-vsrfh\") pod \"community-operators-4j5fs\" (UID: \"c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58\") " pod="openshift-marketplace/community-operators-4j5fs" Jan 22 00:54:12 crc kubenswrapper[4829]: I0122 00:54:12.686218 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58-utilities\") pod \"community-operators-4j5fs\" (UID: \"c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58\") " pod="openshift-marketplace/community-operators-4j5fs" Jan 22 00:54:12 crc kubenswrapper[4829]: I0122 00:54:12.686613 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58-catalog-content\") pod \"community-operators-4j5fs\" (UID: \"c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58\") " pod="openshift-marketplace/community-operators-4j5fs" Jan 22 00:54:12 crc kubenswrapper[4829]: I0122 00:54:12.716052 4829 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-vsrfh\" (UniqueName: \"kubernetes.io/projected/c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58-kube-api-access-vsrfh\") pod \"community-operators-4j5fs\" (UID: \"c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58\") " pod="openshift-marketplace/community-operators-4j5fs" Jan 22 00:54:12 crc kubenswrapper[4829]: I0122 00:54:12.766463 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4j5fs" Jan 22 00:54:13 crc kubenswrapper[4829]: I0122 00:54:13.266458 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4j5fs"] Jan 22 00:54:13 crc kubenswrapper[4829]: I0122 00:54:13.886461 4829 generic.go:334] "Generic (PLEG): container finished" podID="c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58" containerID="2fb0c88d295a0ed37e0cdb32b8144a8c6afa508629f1b85fad983c0cc485c23c" exitCode=0 Jan 22 00:54:13 crc kubenswrapper[4829]: I0122 00:54:13.886598 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4j5fs" event={"ID":"c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58","Type":"ContainerDied","Data":"2fb0c88d295a0ed37e0cdb32b8144a8c6afa508629f1b85fad983c0cc485c23c"} Jan 22 00:54:13 crc kubenswrapper[4829]: I0122 00:54:13.886839 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4j5fs" event={"ID":"c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58","Type":"ContainerStarted","Data":"730a95b3ad76e54e6f1dd1c7ec0e4a873623934ee92f0fa0f912e23de9404bf3"} Jan 22 00:54:14 crc kubenswrapper[4829]: I0122 00:54:14.901410 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4j5fs" event={"ID":"c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58","Type":"ContainerStarted","Data":"9acf99bc92b2a4fb36a8dcf072f8163b8b21dd964d0d6f2e58bf5ea428dde387"} Jan 22 00:54:15 crc kubenswrapper[4829]: I0122 00:54:15.911838 4829 generic.go:334] "Generic (PLEG): container finished" podID="c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58" containerID="9acf99bc92b2a4fb36a8dcf072f8163b8b21dd964d0d6f2e58bf5ea428dde387" exitCode=0 Jan 22 00:54:15 crc kubenswrapper[4829]: I0122 00:54:15.912004 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4j5fs" event={"ID":"c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58","Type":"ContainerDied","Data":"9acf99bc92b2a4fb36a8dcf072f8163b8b21dd964d0d6f2e58bf5ea428dde387"} Jan 22 00:54:16 crc kubenswrapper[4829]: I0122 00:54:16.942964 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4j5fs" event={"ID":"c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58","Type":"ContainerStarted","Data":"739bf6bae08667cdfe5232787d784501b6092c7d9b92674b3484553089c08b45"} Jan 22 00:54:16 crc kubenswrapper[4829]: I0122 00:54:16.971086 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4j5fs" podStartSLOduration=2.579383526 podStartE2EDuration="4.971065329s" podCreationTimestamp="2026-01-22 00:54:12 +0000 UTC" firstStartedPulling="2026-01-22 00:54:13.889352666 +0000 UTC m=+2831.925594618" lastFinishedPulling="2026-01-22 00:54:16.281034509 +0000 UTC m=+2834.317276421" observedRunningTime="2026-01-22 00:54:16.965406845 +0000 UTC m=+2835.001648797" watchObservedRunningTime="2026-01-22 00:54:16.971065329 +0000 UTC m=+2835.007307251" Jan 22 00:54:22 crc kubenswrapper[4829]: I0122 00:54:22.766945 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/community-operators-4j5fs" Jan 22 00:54:22 crc kubenswrapper[4829]: I0122 00:54:22.768812 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4j5fs" Jan 22 00:54:22 crc kubenswrapper[4829]: I0122 00:54:22.818935 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4j5fs" Jan 22 00:54:23 crc kubenswrapper[4829]: I0122 00:54:23.110226 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4j5fs" Jan 22 00:54:23 crc kubenswrapper[4829]: I0122 00:54:23.174970 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4j5fs"] Jan 22 00:54:25 crc kubenswrapper[4829]: I0122 00:54:25.003753 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4j5fs" podUID="c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58" containerName="registry-server" containerID="cri-o://739bf6bae08667cdfe5232787d784501b6092c7d9b92674b3484553089c08b45" gracePeriod=2 Jan 22 00:54:25 crc kubenswrapper[4829]: I0122 00:54:25.501643 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4j5fs" Jan 22 00:54:25 crc kubenswrapper[4829]: I0122 00:54:25.588776 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58-utilities\") pod \"c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58\" (UID: \"c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58\") " Jan 22 00:54:25 crc kubenswrapper[4829]: I0122 00:54:25.588915 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vsrfh\" (UniqueName: \"kubernetes.io/projected/c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58-kube-api-access-vsrfh\") pod \"c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58\" (UID: \"c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58\") " Jan 22 00:54:25 crc kubenswrapper[4829]: I0122 00:54:25.589159 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58-catalog-content\") pod \"c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58\" (UID: \"c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58\") " Jan 22 00:54:25 crc kubenswrapper[4829]: I0122 00:54:25.589925 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58-utilities" (OuterVolumeSpecName: "utilities") pod "c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58" (UID: "c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:54:25 crc kubenswrapper[4829]: I0122 00:54:25.598083 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58-kube-api-access-vsrfh" (OuterVolumeSpecName: "kube-api-access-vsrfh") pod "c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58" (UID: "c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58"). InnerVolumeSpecName "kube-api-access-vsrfh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:54:25 crc kubenswrapper[4829]: I0122 00:54:25.604188 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vsrfh\" (UniqueName: \"kubernetes.io/projected/c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58-kube-api-access-vsrfh\") on node \"crc\" DevicePath \"\"" Jan 22 00:54:25 crc kubenswrapper[4829]: I0122 00:54:25.604237 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 00:54:25 crc kubenswrapper[4829]: I0122 00:54:25.689496 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58" (UID: "c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:54:25 crc kubenswrapper[4829]: I0122 00:54:25.707778 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 00:54:26 crc kubenswrapper[4829]: I0122 00:54:26.014383 4829 generic.go:334] "Generic (PLEG): container finished" podID="c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58" containerID="739bf6bae08667cdfe5232787d784501b6092c7d9b92674b3484553089c08b45" exitCode=0 Jan 22 00:54:26 crc kubenswrapper[4829]: I0122 00:54:26.014428 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4j5fs" event={"ID":"c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58","Type":"ContainerDied","Data":"739bf6bae08667cdfe5232787d784501b6092c7d9b92674b3484553089c08b45"} Jan 22 00:54:26 crc kubenswrapper[4829]: I0122 00:54:26.014458 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4j5fs" event={"ID":"c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58","Type":"ContainerDied","Data":"730a95b3ad76e54e6f1dd1c7ec0e4a873623934ee92f0fa0f912e23de9404bf3"} Jan 22 00:54:26 crc kubenswrapper[4829]: I0122 00:54:26.014476 4829 scope.go:117] "RemoveContainer" containerID="739bf6bae08667cdfe5232787d784501b6092c7d9b92674b3484553089c08b45" Jan 22 00:54:26 crc kubenswrapper[4829]: I0122 00:54:26.014599 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4j5fs" Jan 22 00:54:26 crc kubenswrapper[4829]: I0122 00:54:26.035337 4829 scope.go:117] "RemoveContainer" containerID="9acf99bc92b2a4fb36a8dcf072f8163b8b21dd964d0d6f2e58bf5ea428dde387" Jan 22 00:54:26 crc kubenswrapper[4829]: I0122 00:54:26.060184 4829 scope.go:117] "RemoveContainer" containerID="2fb0c88d295a0ed37e0cdb32b8144a8c6afa508629f1b85fad983c0cc485c23c" Jan 22 00:54:26 crc kubenswrapper[4829]: I0122 00:54:26.109130 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4j5fs"] Jan 22 00:54:26 crc kubenswrapper[4829]: I0122 00:54:26.115397 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4j5fs"] Jan 22 00:54:26 crc kubenswrapper[4829]: I0122 00:54:26.117284 4829 scope.go:117] "RemoveContainer" containerID="739bf6bae08667cdfe5232787d784501b6092c7d9b92674b3484553089c08b45" Jan 22 00:54:26 crc kubenswrapper[4829]: E0122 00:54:26.121954 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"739bf6bae08667cdfe5232787d784501b6092c7d9b92674b3484553089c08b45\": container with ID starting with 739bf6bae08667cdfe5232787d784501b6092c7d9b92674b3484553089c08b45 not found: ID does not exist" containerID="739bf6bae08667cdfe5232787d784501b6092c7d9b92674b3484553089c08b45" Jan 22 00:54:26 crc kubenswrapper[4829]: I0122 00:54:26.121998 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"739bf6bae08667cdfe5232787d784501b6092c7d9b92674b3484553089c08b45"} err="failed to get container status \"739bf6bae08667cdfe5232787d784501b6092c7d9b92674b3484553089c08b45\": rpc error: code = NotFound desc = could not find container \"739bf6bae08667cdfe5232787d784501b6092c7d9b92674b3484553089c08b45\": container with ID starting with 739bf6bae08667cdfe5232787d784501b6092c7d9b92674b3484553089c08b45 not found: ID does not exist" Jan 22 00:54:26 crc kubenswrapper[4829]: I0122 00:54:26.122055 4829 scope.go:117] "RemoveContainer" containerID="9acf99bc92b2a4fb36a8dcf072f8163b8b21dd964d0d6f2e58bf5ea428dde387" Jan 22 00:54:26 crc kubenswrapper[4829]: E0122 00:54:26.122421 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9acf99bc92b2a4fb36a8dcf072f8163b8b21dd964d0d6f2e58bf5ea428dde387\": container with ID starting with 9acf99bc92b2a4fb36a8dcf072f8163b8b21dd964d0d6f2e58bf5ea428dde387 not found: ID does not exist" containerID="9acf99bc92b2a4fb36a8dcf072f8163b8b21dd964d0d6f2e58bf5ea428dde387" Jan 22 00:54:26 crc kubenswrapper[4829]: I0122 00:54:26.122448 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9acf99bc92b2a4fb36a8dcf072f8163b8b21dd964d0d6f2e58bf5ea428dde387"} err="failed to get container status \"9acf99bc92b2a4fb36a8dcf072f8163b8b21dd964d0d6f2e58bf5ea428dde387\": rpc error: code = NotFound desc = could not find container \"9acf99bc92b2a4fb36a8dcf072f8163b8b21dd964d0d6f2e58bf5ea428dde387\": container with ID starting with 9acf99bc92b2a4fb36a8dcf072f8163b8b21dd964d0d6f2e58bf5ea428dde387 not found: ID does not exist" Jan 22 00:54:26 crc kubenswrapper[4829]: I0122 00:54:26.122466 4829 scope.go:117] "RemoveContainer" containerID="2fb0c88d295a0ed37e0cdb32b8144a8c6afa508629f1b85fad983c0cc485c23c" Jan 22 00:54:26 crc kubenswrapper[4829]: E0122 00:54:26.122811 4829 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"2fb0c88d295a0ed37e0cdb32b8144a8c6afa508629f1b85fad983c0cc485c23c\": container with ID starting with 2fb0c88d295a0ed37e0cdb32b8144a8c6afa508629f1b85fad983c0cc485c23c not found: ID does not exist" containerID="2fb0c88d295a0ed37e0cdb32b8144a8c6afa508629f1b85fad983c0cc485c23c" Jan 22 00:54:26 crc kubenswrapper[4829]: I0122 00:54:26.122833 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fb0c88d295a0ed37e0cdb32b8144a8c6afa508629f1b85fad983c0cc485c23c"} err="failed to get container status \"2fb0c88d295a0ed37e0cdb32b8144a8c6afa508629f1b85fad983c0cc485c23c\": rpc error: code = NotFound desc = could not find container \"2fb0c88d295a0ed37e0cdb32b8144a8c6afa508629f1b85fad983c0cc485c23c\": container with ID starting with 2fb0c88d295a0ed37e0cdb32b8144a8c6afa508629f1b85fad983c0cc485c23c not found: ID does not exist" Jan 22 00:54:26 crc kubenswrapper[4829]: I0122 00:54:26.564719 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58" path="/var/lib/kubelet/pods/c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58/volumes" Jan 22 00:54:49 crc kubenswrapper[4829]: I0122 00:54:49.195326 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-bt6cf"] Jan 22 00:54:49 crc kubenswrapper[4829]: E0122 00:54:49.201307 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58" containerName="registry-server" Jan 22 00:54:49 crc kubenswrapper[4829]: I0122 00:54:49.201454 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58" containerName="registry-server" Jan 22 00:54:49 crc kubenswrapper[4829]: E0122 00:54:49.201599 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58" containerName="extract-utilities" Jan 22 00:54:49 crc kubenswrapper[4829]: I0122 00:54:49.201723 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58" containerName="extract-utilities" Jan 22 00:54:49 crc kubenswrapper[4829]: E0122 00:54:49.201828 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58" containerName="extract-content" Jan 22 00:54:49 crc kubenswrapper[4829]: I0122 00:54:49.201932 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58" containerName="extract-content" Jan 22 00:54:49 crc kubenswrapper[4829]: I0122 00:54:49.202263 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="c66bcabd-3d35-4f9a-a0e8-858a6ea1bf58" containerName="registry-server" Jan 22 00:54:49 crc kubenswrapper[4829]: I0122 00:54:49.203838 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bt6cf" Jan 22 00:54:49 crc kubenswrapper[4829]: I0122 00:54:49.215210 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bt6cf"] Jan 22 00:54:49 crc kubenswrapper[4829]: I0122 00:54:49.263891 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eceedb44-4886-4f6f-91df-5d699980fa30-catalog-content\") pod \"certified-operators-bt6cf\" (UID: \"eceedb44-4886-4f6f-91df-5d699980fa30\") " pod="openshift-marketplace/certified-operators-bt6cf" Jan 22 00:54:49 crc kubenswrapper[4829]: I0122 00:54:49.264209 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eceedb44-4886-4f6f-91df-5d699980fa30-utilities\") pod \"certified-operators-bt6cf\" (UID: \"eceedb44-4886-4f6f-91df-5d699980fa30\") " pod="openshift-marketplace/certified-operators-bt6cf" Jan 22 00:54:49 crc kubenswrapper[4829]: I0122 00:54:49.264378 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsvqc\" (UniqueName: \"kubernetes.io/projected/eceedb44-4886-4f6f-91df-5d699980fa30-kube-api-access-zsvqc\") pod \"certified-operators-bt6cf\" (UID: \"eceedb44-4886-4f6f-91df-5d699980fa30\") " pod="openshift-marketplace/certified-operators-bt6cf" Jan 22 00:54:49 crc kubenswrapper[4829]: I0122 00:54:49.365846 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsvqc\" (UniqueName: \"kubernetes.io/projected/eceedb44-4886-4f6f-91df-5d699980fa30-kube-api-access-zsvqc\") pod \"certified-operators-bt6cf\" (UID: \"eceedb44-4886-4f6f-91df-5d699980fa30\") " pod="openshift-marketplace/certified-operators-bt6cf" Jan 22 00:54:49 crc kubenswrapper[4829]: I0122 00:54:49.365976 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eceedb44-4886-4f6f-91df-5d699980fa30-catalog-content\") pod \"certified-operators-bt6cf\" (UID: \"eceedb44-4886-4f6f-91df-5d699980fa30\") " pod="openshift-marketplace/certified-operators-bt6cf" Jan 22 00:54:49 crc kubenswrapper[4829]: I0122 00:54:49.366766 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eceedb44-4886-4f6f-91df-5d699980fa30-catalog-content\") pod \"certified-operators-bt6cf\" (UID: \"eceedb44-4886-4f6f-91df-5d699980fa30\") " pod="openshift-marketplace/certified-operators-bt6cf" Jan 22 00:54:49 crc kubenswrapper[4829]: I0122 00:54:49.366888 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eceedb44-4886-4f6f-91df-5d699980fa30-utilities\") pod \"certified-operators-bt6cf\" (UID: \"eceedb44-4886-4f6f-91df-5d699980fa30\") " pod="openshift-marketplace/certified-operators-bt6cf" Jan 22 00:54:49 crc kubenswrapper[4829]: I0122 00:54:49.367343 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eceedb44-4886-4f6f-91df-5d699980fa30-utilities\") pod \"certified-operators-bt6cf\" (UID: \"eceedb44-4886-4f6f-91df-5d699980fa30\") " pod="openshift-marketplace/certified-operators-bt6cf" Jan 22 00:54:49 crc kubenswrapper[4829]: I0122 00:54:49.392985 4829 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-zsvqc\" (UniqueName: \"kubernetes.io/projected/eceedb44-4886-4f6f-91df-5d699980fa30-kube-api-access-zsvqc\") pod \"certified-operators-bt6cf\" (UID: \"eceedb44-4886-4f6f-91df-5d699980fa30\") " pod="openshift-marketplace/certified-operators-bt6cf" Jan 22 00:54:49 crc kubenswrapper[4829]: I0122 00:54:49.527603 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bt6cf" Jan 22 00:54:49 crc kubenswrapper[4829]: I0122 00:54:49.797639 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bt6cf"] Jan 22 00:54:50 crc kubenswrapper[4829]: I0122 00:54:50.313216 4829 generic.go:334] "Generic (PLEG): container finished" podID="eceedb44-4886-4f6f-91df-5d699980fa30" containerID="16ab992836792f0564db51681f05a97ecb2b3b0fd355d882957fc6a310fbf34a" exitCode=0 Jan 22 00:54:50 crc kubenswrapper[4829]: I0122 00:54:50.313411 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bt6cf" event={"ID":"eceedb44-4886-4f6f-91df-5d699980fa30","Type":"ContainerDied","Data":"16ab992836792f0564db51681f05a97ecb2b3b0fd355d882957fc6a310fbf34a"} Jan 22 00:54:50 crc kubenswrapper[4829]: I0122 00:54:50.313524 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bt6cf" event={"ID":"eceedb44-4886-4f6f-91df-5d699980fa30","Type":"ContainerStarted","Data":"c65e8b7c7a42e1dc0140e81aa18c50a09d748035b4e18d9e99a60e887aac655a"} Jan 22 00:54:51 crc kubenswrapper[4829]: I0122 00:54:51.324986 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bt6cf" event={"ID":"eceedb44-4886-4f6f-91df-5d699980fa30","Type":"ContainerStarted","Data":"9e5d2a0d23b91c0ccd1c9c3bffe73de6f32b4a3d0bc6a10d9cd187d5e4919fa0"} Jan 22 00:54:52 crc kubenswrapper[4829]: I0122 00:54:52.337434 4829 generic.go:334] "Generic (PLEG): container finished" podID="eceedb44-4886-4f6f-91df-5d699980fa30" containerID="9e5d2a0d23b91c0ccd1c9c3bffe73de6f32b4a3d0bc6a10d9cd187d5e4919fa0" exitCode=0 Jan 22 00:54:52 crc kubenswrapper[4829]: I0122 00:54:52.337521 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bt6cf" event={"ID":"eceedb44-4886-4f6f-91df-5d699980fa30","Type":"ContainerDied","Data":"9e5d2a0d23b91c0ccd1c9c3bffe73de6f32b4a3d0bc6a10d9cd187d5e4919fa0"} Jan 22 00:54:53 crc kubenswrapper[4829]: I0122 00:54:53.353456 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bt6cf" event={"ID":"eceedb44-4886-4f6f-91df-5d699980fa30","Type":"ContainerStarted","Data":"8d7d93e9aeaf15fc51e0a81a7577cc5bfa6e4aecc81e188566fc3a8ea5be7663"} Jan 22 00:54:53 crc kubenswrapper[4829]: I0122 00:54:53.384423 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-bt6cf" podStartSLOduration=1.967198202 podStartE2EDuration="4.384399031s" podCreationTimestamp="2026-01-22 00:54:49 +0000 UTC" firstStartedPulling="2026-01-22 00:54:50.314967566 +0000 UTC m=+2868.351209478" lastFinishedPulling="2026-01-22 00:54:52.732168405 +0000 UTC m=+2870.768410307" observedRunningTime="2026-01-22 00:54:53.376775537 +0000 UTC m=+2871.413017459" watchObservedRunningTime="2026-01-22 00:54:53.384399031 +0000 UTC m=+2871.420640943" Jan 22 00:54:59 crc kubenswrapper[4829]: I0122 00:54:59.528267 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/certified-operators-bt6cf" Jan 22 00:54:59 crc kubenswrapper[4829]: I0122 00:54:59.530382 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-bt6cf" Jan 22 00:54:59 crc kubenswrapper[4829]: I0122 00:54:59.586658 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-bt6cf" Jan 22 00:55:00 crc kubenswrapper[4829]: I0122 00:55:00.550810 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-bt6cf" Jan 22 00:55:00 crc kubenswrapper[4829]: I0122 00:55:00.596438 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bt6cf"] Jan 22 00:55:02 crc kubenswrapper[4829]: I0122 00:55:02.426747 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-bt6cf" podUID="eceedb44-4886-4f6f-91df-5d699980fa30" containerName="registry-server" containerID="cri-o://8d7d93e9aeaf15fc51e0a81a7577cc5bfa6e4aecc81e188566fc3a8ea5be7663" gracePeriod=2 Jan 22 00:55:03 crc kubenswrapper[4829]: I0122 00:55:03.347479 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bt6cf" Jan 22 00:55:03 crc kubenswrapper[4829]: I0122 00:55:03.434438 4829 generic.go:334] "Generic (PLEG): container finished" podID="eceedb44-4886-4f6f-91df-5d699980fa30" containerID="8d7d93e9aeaf15fc51e0a81a7577cc5bfa6e4aecc81e188566fc3a8ea5be7663" exitCode=0 Jan 22 00:55:03 crc kubenswrapper[4829]: I0122 00:55:03.434488 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bt6cf" event={"ID":"eceedb44-4886-4f6f-91df-5d699980fa30","Type":"ContainerDied","Data":"8d7d93e9aeaf15fc51e0a81a7577cc5bfa6e4aecc81e188566fc3a8ea5be7663"} Jan 22 00:55:03 crc kubenswrapper[4829]: I0122 00:55:03.434523 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bt6cf" event={"ID":"eceedb44-4886-4f6f-91df-5d699980fa30","Type":"ContainerDied","Data":"c65e8b7c7a42e1dc0140e81aa18c50a09d748035b4e18d9e99a60e887aac655a"} Jan 22 00:55:03 crc kubenswrapper[4829]: I0122 00:55:03.434564 4829 scope.go:117] "RemoveContainer" containerID="8d7d93e9aeaf15fc51e0a81a7577cc5bfa6e4aecc81e188566fc3a8ea5be7663" Jan 22 00:55:03 crc kubenswrapper[4829]: I0122 00:55:03.434707 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bt6cf" Jan 22 00:55:03 crc kubenswrapper[4829]: I0122 00:55:03.491228 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eceedb44-4886-4f6f-91df-5d699980fa30-utilities\") pod \"eceedb44-4886-4f6f-91df-5d699980fa30\" (UID: \"eceedb44-4886-4f6f-91df-5d699980fa30\") " Jan 22 00:55:03 crc kubenswrapper[4829]: I0122 00:55:03.491391 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zsvqc\" (UniqueName: \"kubernetes.io/projected/eceedb44-4886-4f6f-91df-5d699980fa30-kube-api-access-zsvqc\") pod \"eceedb44-4886-4f6f-91df-5d699980fa30\" (UID: \"eceedb44-4886-4f6f-91df-5d699980fa30\") " Jan 22 00:55:03 crc kubenswrapper[4829]: I0122 00:55:03.491665 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eceedb44-4886-4f6f-91df-5d699980fa30-catalog-content\") pod \"eceedb44-4886-4f6f-91df-5d699980fa30\" (UID: \"eceedb44-4886-4f6f-91df-5d699980fa30\") " Jan 22 00:55:03 crc kubenswrapper[4829]: I0122 00:55:03.492366 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eceedb44-4886-4f6f-91df-5d699980fa30-utilities" (OuterVolumeSpecName: "utilities") pod "eceedb44-4886-4f6f-91df-5d699980fa30" (UID: "eceedb44-4886-4f6f-91df-5d699980fa30"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:55:03 crc kubenswrapper[4829]: I0122 00:55:03.497810 4829 scope.go:117] "RemoveContainer" containerID="9e5d2a0d23b91c0ccd1c9c3bffe73de6f32b4a3d0bc6a10d9cd187d5e4919fa0" Jan 22 00:55:03 crc kubenswrapper[4829]: I0122 00:55:03.521150 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eceedb44-4886-4f6f-91df-5d699980fa30-kube-api-access-zsvqc" (OuterVolumeSpecName: "kube-api-access-zsvqc") pod "eceedb44-4886-4f6f-91df-5d699980fa30" (UID: "eceedb44-4886-4f6f-91df-5d699980fa30"). InnerVolumeSpecName "kube-api-access-zsvqc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 00:55:03 crc kubenswrapper[4829]: I0122 00:55:03.524838 4829 scope.go:117] "RemoveContainer" containerID="16ab992836792f0564db51681f05a97ecb2b3b0fd355d882957fc6a310fbf34a" Jan 22 00:55:03 crc kubenswrapper[4829]: I0122 00:55:03.561471 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eceedb44-4886-4f6f-91df-5d699980fa30-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eceedb44-4886-4f6f-91df-5d699980fa30" (UID: "eceedb44-4886-4f6f-91df-5d699980fa30"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 00:55:03 crc kubenswrapper[4829]: I0122 00:55:03.594605 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eceedb44-4886-4f6f-91df-5d699980fa30-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 00:55:03 crc kubenswrapper[4829]: I0122 00:55:03.594631 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zsvqc\" (UniqueName: \"kubernetes.io/projected/eceedb44-4886-4f6f-91df-5d699980fa30-kube-api-access-zsvqc\") on node \"crc\" DevicePath \"\"" Jan 22 00:55:03 crc kubenswrapper[4829]: I0122 00:55:03.594640 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eceedb44-4886-4f6f-91df-5d699980fa30-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 00:55:03 crc kubenswrapper[4829]: I0122 00:55:03.597269 4829 scope.go:117] "RemoveContainer" containerID="8d7d93e9aeaf15fc51e0a81a7577cc5bfa6e4aecc81e188566fc3a8ea5be7663" Jan 22 00:55:03 crc kubenswrapper[4829]: E0122 00:55:03.597840 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d7d93e9aeaf15fc51e0a81a7577cc5bfa6e4aecc81e188566fc3a8ea5be7663\": container with ID starting with 8d7d93e9aeaf15fc51e0a81a7577cc5bfa6e4aecc81e188566fc3a8ea5be7663 not found: ID does not exist" containerID="8d7d93e9aeaf15fc51e0a81a7577cc5bfa6e4aecc81e188566fc3a8ea5be7663" Jan 22 00:55:03 crc kubenswrapper[4829]: I0122 00:55:03.597884 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d7d93e9aeaf15fc51e0a81a7577cc5bfa6e4aecc81e188566fc3a8ea5be7663"} err="failed to get container status \"8d7d93e9aeaf15fc51e0a81a7577cc5bfa6e4aecc81e188566fc3a8ea5be7663\": rpc error: code = NotFound desc = could not find container \"8d7d93e9aeaf15fc51e0a81a7577cc5bfa6e4aecc81e188566fc3a8ea5be7663\": container with ID starting with 8d7d93e9aeaf15fc51e0a81a7577cc5bfa6e4aecc81e188566fc3a8ea5be7663 not found: ID does not exist" Jan 22 00:55:03 crc kubenswrapper[4829]: I0122 00:55:03.597916 4829 scope.go:117] "RemoveContainer" containerID="9e5d2a0d23b91c0ccd1c9c3bffe73de6f32b4a3d0bc6a10d9cd187d5e4919fa0" Jan 22 00:55:03 crc kubenswrapper[4829]: E0122 00:55:03.598227 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e5d2a0d23b91c0ccd1c9c3bffe73de6f32b4a3d0bc6a10d9cd187d5e4919fa0\": container with ID starting with 9e5d2a0d23b91c0ccd1c9c3bffe73de6f32b4a3d0bc6a10d9cd187d5e4919fa0 not found: ID does not exist" containerID="9e5d2a0d23b91c0ccd1c9c3bffe73de6f32b4a3d0bc6a10d9cd187d5e4919fa0" Jan 22 00:55:03 crc kubenswrapper[4829]: I0122 00:55:03.598254 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e5d2a0d23b91c0ccd1c9c3bffe73de6f32b4a3d0bc6a10d9cd187d5e4919fa0"} err="failed to get container status \"9e5d2a0d23b91c0ccd1c9c3bffe73de6f32b4a3d0bc6a10d9cd187d5e4919fa0\": rpc error: code = NotFound desc = could not find container \"9e5d2a0d23b91c0ccd1c9c3bffe73de6f32b4a3d0bc6a10d9cd187d5e4919fa0\": container with ID starting with 9e5d2a0d23b91c0ccd1c9c3bffe73de6f32b4a3d0bc6a10d9cd187d5e4919fa0 not found: ID does not exist" Jan 22 00:55:03 crc kubenswrapper[4829]: I0122 00:55:03.598270 4829 scope.go:117] "RemoveContainer" containerID="16ab992836792f0564db51681f05a97ecb2b3b0fd355d882957fc6a310fbf34a" Jan 22 00:55:03 crc 
kubenswrapper[4829]: E0122 00:55:03.598565 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16ab992836792f0564db51681f05a97ecb2b3b0fd355d882957fc6a310fbf34a\": container with ID starting with 16ab992836792f0564db51681f05a97ecb2b3b0fd355d882957fc6a310fbf34a not found: ID does not exist" containerID="16ab992836792f0564db51681f05a97ecb2b3b0fd355d882957fc6a310fbf34a" Jan 22 00:55:03 crc kubenswrapper[4829]: I0122 00:55:03.598594 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16ab992836792f0564db51681f05a97ecb2b3b0fd355d882957fc6a310fbf34a"} err="failed to get container status \"16ab992836792f0564db51681f05a97ecb2b3b0fd355d882957fc6a310fbf34a\": rpc error: code = NotFound desc = could not find container \"16ab992836792f0564db51681f05a97ecb2b3b0fd355d882957fc6a310fbf34a\": container with ID starting with 16ab992836792f0564db51681f05a97ecb2b3b0fd355d882957fc6a310fbf34a not found: ID does not exist" Jan 22 00:55:03 crc kubenswrapper[4829]: I0122 00:55:03.794143 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bt6cf"] Jan 22 00:55:03 crc kubenswrapper[4829]: I0122 00:55:03.809108 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-bt6cf"] Jan 22 00:55:04 crc kubenswrapper[4829]: I0122 00:55:04.658286 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eceedb44-4886-4f6f-91df-5d699980fa30" path="/var/lib/kubelet/pods/eceedb44-4886-4f6f-91df-5d699980fa30/volumes" Jan 22 00:55:04 crc kubenswrapper[4829]: I0122 00:55:04.659516 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:55:04 crc kubenswrapper[4829]: I0122 00:55:04.659604 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:55:34 crc kubenswrapper[4829]: I0122 00:55:34.658528 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:55:34 crc kubenswrapper[4829]: I0122 00:55:34.659229 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:56:04 crc kubenswrapper[4829]: I0122 00:56:04.659174 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 00:56:04 crc kubenswrapper[4829]: I0122 00:56:04.660071 4829 
prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 00:56:04 crc kubenswrapper[4829]: I0122 00:56:04.660158 4829 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 00:56:04 crc kubenswrapper[4829]: I0122 00:56:04.661508 4829 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f"} pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 00:56:04 crc kubenswrapper[4829]: I0122 00:56:04.661695 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" containerID="cri-o://6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" gracePeriod=600 Jan 22 00:56:04 crc kubenswrapper[4829]: E0122 00:56:04.790066 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:56:05 crc kubenswrapper[4829]: I0122 00:56:05.045396 4829 generic.go:334] "Generic (PLEG): container finished" podID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerID="6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" exitCode=0 Jan 22 00:56:05 crc kubenswrapper[4829]: I0122 00:56:05.045453 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerDied","Data":"6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f"} Jan 22 00:56:05 crc kubenswrapper[4829]: I0122 00:56:05.045495 4829 scope.go:117] "RemoveContainer" containerID="15982f19cf72331ab8bf2e1cf76da92034f496939a60c8859d8942ae9cdedd3d" Jan 22 00:56:05 crc kubenswrapper[4829]: I0122 00:56:05.046302 4829 scope.go:117] "RemoveContainer" containerID="6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" Jan 22 00:56:05 crc kubenswrapper[4829]: E0122 00:56:05.046638 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:56:15 crc kubenswrapper[4829]: I0122 00:56:15.554338 4829 scope.go:117] "RemoveContainer" containerID="6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" Jan 22 00:56:15 crc kubenswrapper[4829]: E0122 00:56:15.555776 4829 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:56:30 crc kubenswrapper[4829]: I0122 00:56:30.557026 4829 scope.go:117] "RemoveContainer" containerID="6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" Jan 22 00:56:30 crc kubenswrapper[4829]: E0122 00:56:30.557942 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:56:45 crc kubenswrapper[4829]: I0122 00:56:45.553872 4829 scope.go:117] "RemoveContainer" containerID="6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" Jan 22 00:56:45 crc kubenswrapper[4829]: E0122 00:56:45.554721 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:56:58 crc kubenswrapper[4829]: I0122 00:56:58.554317 4829 scope.go:117] "RemoveContainer" containerID="6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" Jan 22 00:56:58 crc kubenswrapper[4829]: E0122 00:56:58.556214 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:57:13 crc kubenswrapper[4829]: I0122 00:57:13.553887 4829 scope.go:117] "RemoveContainer" containerID="6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" Jan 22 00:57:13 crc kubenswrapper[4829]: E0122 00:57:13.555109 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:57:24 crc kubenswrapper[4829]: I0122 00:57:24.554126 4829 scope.go:117] "RemoveContainer" containerID="6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" Jan 22 00:57:24 crc kubenswrapper[4829]: E0122 00:57:24.554973 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:57:39 crc kubenswrapper[4829]: I0122 00:57:39.554501 4829 scope.go:117] "RemoveContainer" containerID="6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" Jan 22 00:57:39 crc kubenswrapper[4829]: E0122 00:57:39.555429 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:57:51 crc kubenswrapper[4829]: I0122 00:57:51.554270 4829 scope.go:117] "RemoveContainer" containerID="6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" Jan 22 00:57:51 crc kubenswrapper[4829]: E0122 00:57:51.555029 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:58:05 crc kubenswrapper[4829]: I0122 00:58:05.552980 4829 scope.go:117] "RemoveContainer" containerID="6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" Jan 22 00:58:05 crc kubenswrapper[4829]: E0122 00:58:05.553603 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:58:18 crc kubenswrapper[4829]: I0122 00:58:18.554707 4829 scope.go:117] "RemoveContainer" containerID="6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" Jan 22 00:58:18 crc kubenswrapper[4829]: E0122 00:58:18.555679 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:58:31 crc kubenswrapper[4829]: I0122 00:58:31.567905 4829 scope.go:117] "RemoveContainer" containerID="6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" Jan 22 00:58:31 crc kubenswrapper[4829]: E0122 00:58:31.568817 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:58:42 crc kubenswrapper[4829]: I0122 00:58:42.559021 4829 scope.go:117] "RemoveContainer" containerID="6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" Jan 22 00:58:42 crc kubenswrapper[4829]: E0122 00:58:42.560211 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:58:56 crc kubenswrapper[4829]: I0122 00:58:56.553750 4829 scope.go:117] "RemoveContainer" containerID="6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" Jan 22 00:58:56 crc kubenswrapper[4829]: E0122 00:58:56.554599 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:59:10 crc kubenswrapper[4829]: I0122 00:59:10.559099 4829 scope.go:117] "RemoveContainer" containerID="6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" Jan 22 00:59:10 crc kubenswrapper[4829]: E0122 00:59:10.559782 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:59:22 crc kubenswrapper[4829]: I0122 00:59:22.561054 4829 scope.go:117] "RemoveContainer" containerID="6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" Jan 22 00:59:22 crc kubenswrapper[4829]: E0122 00:59:22.561994 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:59:34 crc kubenswrapper[4829]: I0122 00:59:34.555087 4829 scope.go:117] "RemoveContainer" containerID="6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" Jan 22 00:59:34 crc kubenswrapper[4829]: E0122 00:59:34.555799 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:59:47 crc kubenswrapper[4829]: I0122 00:59:47.553507 4829 
scope.go:117] "RemoveContainer" containerID="6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" Jan 22 00:59:47 crc kubenswrapper[4829]: E0122 00:59:47.554505 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 00:59:59 crc kubenswrapper[4829]: I0122 00:59:59.554357 4829 scope.go:117] "RemoveContainer" containerID="6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" Jan 22 00:59:59 crc kubenswrapper[4829]: E0122 00:59:59.555341 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:00:00 crc kubenswrapper[4829]: I0122 01:00:00.170517 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484060-wmmnl"] Jan 22 01:00:00 crc kubenswrapper[4829]: E0122 01:00:00.170885 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eceedb44-4886-4f6f-91df-5d699980fa30" containerName="extract-utilities" Jan 22 01:00:00 crc kubenswrapper[4829]: I0122 01:00:00.170908 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="eceedb44-4886-4f6f-91df-5d699980fa30" containerName="extract-utilities" Jan 22 01:00:00 crc kubenswrapper[4829]: E0122 01:00:00.170924 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eceedb44-4886-4f6f-91df-5d699980fa30" containerName="registry-server" Jan 22 01:00:00 crc kubenswrapper[4829]: I0122 01:00:00.170932 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="eceedb44-4886-4f6f-91df-5d699980fa30" containerName="registry-server" Jan 22 01:00:00 crc kubenswrapper[4829]: E0122 01:00:00.170950 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eceedb44-4886-4f6f-91df-5d699980fa30" containerName="extract-content" Jan 22 01:00:00 crc kubenswrapper[4829]: I0122 01:00:00.170958 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="eceedb44-4886-4f6f-91df-5d699980fa30" containerName="extract-content" Jan 22 01:00:00 crc kubenswrapper[4829]: I0122 01:00:00.171109 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="eceedb44-4886-4f6f-91df-5d699980fa30" containerName="registry-server" Jan 22 01:00:00 crc kubenswrapper[4829]: I0122 01:00:00.171728 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484060-wmmnl" Jan 22 01:00:00 crc kubenswrapper[4829]: I0122 01:00:00.174449 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 01:00:00 crc kubenswrapper[4829]: I0122 01:00:00.174587 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 01:00:00 crc kubenswrapper[4829]: I0122 01:00:00.186071 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484060-wmmnl"] Jan 22 01:00:00 crc kubenswrapper[4829]: I0122 01:00:00.321675 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79xq5\" (UniqueName: \"kubernetes.io/projected/fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e-kube-api-access-79xq5\") pod \"collect-profiles-29484060-wmmnl\" (UID: \"fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484060-wmmnl" Jan 22 01:00:00 crc kubenswrapper[4829]: I0122 01:00:00.321750 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e-config-volume\") pod \"collect-profiles-29484060-wmmnl\" (UID: \"fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484060-wmmnl" Jan 22 01:00:00 crc kubenswrapper[4829]: I0122 01:00:00.321794 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e-secret-volume\") pod \"collect-profiles-29484060-wmmnl\" (UID: \"fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484060-wmmnl" Jan 22 01:00:00 crc kubenswrapper[4829]: I0122 01:00:00.423180 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79xq5\" (UniqueName: \"kubernetes.io/projected/fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e-kube-api-access-79xq5\") pod \"collect-profiles-29484060-wmmnl\" (UID: \"fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484060-wmmnl" Jan 22 01:00:00 crc kubenswrapper[4829]: I0122 01:00:00.423307 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e-config-volume\") pod \"collect-profiles-29484060-wmmnl\" (UID: \"fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484060-wmmnl" Jan 22 01:00:00 crc kubenswrapper[4829]: I0122 01:00:00.423400 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e-secret-volume\") pod \"collect-profiles-29484060-wmmnl\" (UID: \"fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484060-wmmnl" Jan 22 01:00:00 crc kubenswrapper[4829]: I0122 01:00:00.425771 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e-config-volume\") pod 
\"collect-profiles-29484060-wmmnl\" (UID: \"fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484060-wmmnl" Jan 22 01:00:00 crc kubenswrapper[4829]: I0122 01:00:00.432301 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e-secret-volume\") pod \"collect-profiles-29484060-wmmnl\" (UID: \"fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484060-wmmnl" Jan 22 01:00:00 crc kubenswrapper[4829]: I0122 01:00:00.455885 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79xq5\" (UniqueName: \"kubernetes.io/projected/fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e-kube-api-access-79xq5\") pod \"collect-profiles-29484060-wmmnl\" (UID: \"fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484060-wmmnl" Jan 22 01:00:00 crc kubenswrapper[4829]: I0122 01:00:00.507852 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484060-wmmnl" Jan 22 01:00:00 crc kubenswrapper[4829]: I0122 01:00:00.949898 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484060-wmmnl"] Jan 22 01:00:01 crc kubenswrapper[4829]: I0122 01:00:01.621264 4829 generic.go:334] "Generic (PLEG): container finished" podID="fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e" containerID="9a6c06f4defd6b4feacbfee5c7562c1ad973145f4863aee484cfab3459b8dca6" exitCode=0 Jan 22 01:00:01 crc kubenswrapper[4829]: I0122 01:00:01.621373 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484060-wmmnl" event={"ID":"fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e","Type":"ContainerDied","Data":"9a6c06f4defd6b4feacbfee5c7562c1ad973145f4863aee484cfab3459b8dca6"} Jan 22 01:00:01 crc kubenswrapper[4829]: I0122 01:00:01.621663 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484060-wmmnl" event={"ID":"fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e","Type":"ContainerStarted","Data":"d9255d12391e40354548d2cad4c75c4f7dae77e8bf7e25070524ee5fb9cec369"} Jan 22 01:00:02 crc kubenswrapper[4829]: I0122 01:00:02.973513 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484060-wmmnl" Jan 22 01:00:03 crc kubenswrapper[4829]: I0122 01:00:03.171862 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e-config-volume\") pod \"fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e\" (UID: \"fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e\") " Jan 22 01:00:03 crc kubenswrapper[4829]: I0122 01:00:03.172007 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-79xq5\" (UniqueName: \"kubernetes.io/projected/fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e-kube-api-access-79xq5\") pod \"fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e\" (UID: \"fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e\") " Jan 22 01:00:03 crc kubenswrapper[4829]: I0122 01:00:03.172065 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e-secret-volume\") pod \"fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e\" (UID: \"fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e\") " Jan 22 01:00:03 crc kubenswrapper[4829]: I0122 01:00:03.172938 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e-config-volume" (OuterVolumeSpecName: "config-volume") pod "fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e" (UID: "fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 01:00:03 crc kubenswrapper[4829]: I0122 01:00:03.177366 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e-kube-api-access-79xq5" (OuterVolumeSpecName: "kube-api-access-79xq5") pod "fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e" (UID: "fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e"). InnerVolumeSpecName "kube-api-access-79xq5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 01:00:03 crc kubenswrapper[4829]: I0122 01:00:03.177881 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e" (UID: "fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 01:00:03 crc kubenswrapper[4829]: I0122 01:00:03.273723 4829 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 01:00:03 crc kubenswrapper[4829]: I0122 01:00:03.274157 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-79xq5\" (UniqueName: \"kubernetes.io/projected/fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e-kube-api-access-79xq5\") on node \"crc\" DevicePath \"\"" Jan 22 01:00:03 crc kubenswrapper[4829]: I0122 01:00:03.274183 4829 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 01:00:03 crc kubenswrapper[4829]: I0122 01:00:03.639405 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484060-wmmnl" event={"ID":"fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e","Type":"ContainerDied","Data":"d9255d12391e40354548d2cad4c75c4f7dae77e8bf7e25070524ee5fb9cec369"} Jan 22 01:00:03 crc kubenswrapper[4829]: I0122 01:00:03.639455 4829 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d9255d12391e40354548d2cad4c75c4f7dae77e8bf7e25070524ee5fb9cec369" Jan 22 01:00:03 crc kubenswrapper[4829]: I0122 01:00:03.639485 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484060-wmmnl" Jan 22 01:00:04 crc kubenswrapper[4829]: I0122 01:00:04.067319 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484015-jmwr9"] Jan 22 01:00:04 crc kubenswrapper[4829]: I0122 01:00:04.074070 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484015-jmwr9"] Jan 22 01:00:04 crc kubenswrapper[4829]: I0122 01:00:04.565743 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ae46e15-a52a-445b-8d48-ad5ba5415ed0" path="/var/lib/kubelet/pods/3ae46e15-a52a-445b-8d48-ad5ba5415ed0/volumes" Jan 22 01:00:13 crc kubenswrapper[4829]: I0122 01:00:13.554423 4829 scope.go:117] "RemoveContainer" containerID="6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" Jan 22 01:00:13 crc kubenswrapper[4829]: E0122 01:00:13.555512 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:00:26 crc kubenswrapper[4829]: I0122 01:00:26.558623 4829 scope.go:117] "RemoveContainer" containerID="6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" Jan 22 01:00:26 crc kubenswrapper[4829]: E0122 01:00:26.559652 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:00:39 crc kubenswrapper[4829]: I0122 01:00:39.555294 4829 scope.go:117] "RemoveContainer" containerID="6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" Jan 22 01:00:39 crc kubenswrapper[4829]: E0122 01:00:39.556486 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:00:52 crc kubenswrapper[4829]: I0122 01:00:52.559969 4829 scope.go:117] "RemoveContainer" containerID="6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" Jan 22 01:00:52 crc kubenswrapper[4829]: E0122 01:00:52.561225 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:01:03 crc kubenswrapper[4829]: I0122 01:01:03.556805 4829 scope.go:117] "RemoveContainer" containerID="6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" Jan 22 01:01:03 crc kubenswrapper[4829]: E0122 01:01:03.557663 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:01:03 crc kubenswrapper[4829]: I0122 01:01:03.975068 4829 scope.go:117] "RemoveContainer" containerID="4aa378a603bff3188493fd4abed3143cc4ceed135d138543f2cbd48941d6e91e" Jan 22 01:01:17 crc kubenswrapper[4829]: I0122 01:01:17.554313 4829 scope.go:117] "RemoveContainer" containerID="6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" Jan 22 01:01:18 crc kubenswrapper[4829]: I0122 01:01:18.357267 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerStarted","Data":"1a18f67d780b8159297dbb4fe83b23a2b1970de46ebbe229f30e582bf2037a16"} Jan 22 01:03:34 crc kubenswrapper[4829]: I0122 01:03:34.659030 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 01:03:34 crc kubenswrapper[4829]: I0122 01:03:34.659482 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" Jan 22 01:04:04 crc kubenswrapper[4829]: I0122 01:04:04.659026 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 01:04:04 crc kubenswrapper[4829]: I0122 01:04:04.659758 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 01:04:28 crc kubenswrapper[4829]: I0122 01:04:28.511284 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4lcfd"] Jan 22 01:04:28 crc kubenswrapper[4829]: E0122 01:04:28.512691 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e" containerName="collect-profiles" Jan 22 01:04:28 crc kubenswrapper[4829]: I0122 01:04:28.512727 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e" containerName="collect-profiles" Jan 22 01:04:28 crc kubenswrapper[4829]: I0122 01:04:28.513069 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe52b4a6-74d8-4b3d-83cb-f64fbbf7214e" containerName="collect-profiles" Jan 22 01:04:28 crc kubenswrapper[4829]: I0122 01:04:28.515378 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4lcfd" Jan 22 01:04:28 crc kubenswrapper[4829]: I0122 01:04:28.520920 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4lcfd"] Jan 22 01:04:28 crc kubenswrapper[4829]: I0122 01:04:28.560705 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3e85d11-53c4-453c-894c-04ccc1caaa94-utilities\") pod \"community-operators-4lcfd\" (UID: \"f3e85d11-53c4-453c-894c-04ccc1caaa94\") " pod="openshift-marketplace/community-operators-4lcfd" Jan 22 01:04:28 crc kubenswrapper[4829]: I0122 01:04:28.560839 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3e85d11-53c4-453c-894c-04ccc1caaa94-catalog-content\") pod \"community-operators-4lcfd\" (UID: \"f3e85d11-53c4-453c-894c-04ccc1caaa94\") " pod="openshift-marketplace/community-operators-4lcfd" Jan 22 01:04:28 crc kubenswrapper[4829]: I0122 01:04:28.561038 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgxhm\" (UniqueName: \"kubernetes.io/projected/f3e85d11-53c4-453c-894c-04ccc1caaa94-kube-api-access-mgxhm\") pod \"community-operators-4lcfd\" (UID: \"f3e85d11-53c4-453c-894c-04ccc1caaa94\") " pod="openshift-marketplace/community-operators-4lcfd" Jan 22 01:04:28 crc kubenswrapper[4829]: I0122 01:04:28.662250 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3e85d11-53c4-453c-894c-04ccc1caaa94-utilities\") pod \"community-operators-4lcfd\" (UID: \"f3e85d11-53c4-453c-894c-04ccc1caaa94\") " pod="openshift-marketplace/community-operators-4lcfd" Jan 22 01:04:28 
crc kubenswrapper[4829]: I0122 01:04:28.662307 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3e85d11-53c4-453c-894c-04ccc1caaa94-catalog-content\") pod \"community-operators-4lcfd\" (UID: \"f3e85d11-53c4-453c-894c-04ccc1caaa94\") " pod="openshift-marketplace/community-operators-4lcfd" Jan 22 01:04:28 crc kubenswrapper[4829]: I0122 01:04:28.662378 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgxhm\" (UniqueName: \"kubernetes.io/projected/f3e85d11-53c4-453c-894c-04ccc1caaa94-kube-api-access-mgxhm\") pod \"community-operators-4lcfd\" (UID: \"f3e85d11-53c4-453c-894c-04ccc1caaa94\") " pod="openshift-marketplace/community-operators-4lcfd" Jan 22 01:04:28 crc kubenswrapper[4829]: I0122 01:04:28.663160 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3e85d11-53c4-453c-894c-04ccc1caaa94-utilities\") pod \"community-operators-4lcfd\" (UID: \"f3e85d11-53c4-453c-894c-04ccc1caaa94\") " pod="openshift-marketplace/community-operators-4lcfd" Jan 22 01:04:28 crc kubenswrapper[4829]: I0122 01:04:28.663423 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3e85d11-53c4-453c-894c-04ccc1caaa94-catalog-content\") pod \"community-operators-4lcfd\" (UID: \"f3e85d11-53c4-453c-894c-04ccc1caaa94\") " pod="openshift-marketplace/community-operators-4lcfd" Jan 22 01:04:28 crc kubenswrapper[4829]: I0122 01:04:28.688945 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgxhm\" (UniqueName: \"kubernetes.io/projected/f3e85d11-53c4-453c-894c-04ccc1caaa94-kube-api-access-mgxhm\") pod \"community-operators-4lcfd\" (UID: \"f3e85d11-53c4-453c-894c-04ccc1caaa94\") " pod="openshift-marketplace/community-operators-4lcfd" Jan 22 01:04:28 crc kubenswrapper[4829]: I0122 01:04:28.852477 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4lcfd" Jan 22 01:04:29 crc kubenswrapper[4829]: I0122 01:04:29.143732 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4lcfd"] Jan 22 01:04:29 crc kubenswrapper[4829]: I0122 01:04:29.214686 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4lcfd" event={"ID":"f3e85d11-53c4-453c-894c-04ccc1caaa94","Type":"ContainerStarted","Data":"3e9af1e366b66fbdf5f1e0f1e71f00d048e531be8fad11932ba31a185bbc6edf"} Jan 22 01:04:30 crc kubenswrapper[4829]: I0122 01:04:30.228086 4829 generic.go:334] "Generic (PLEG): container finished" podID="f3e85d11-53c4-453c-894c-04ccc1caaa94" containerID="45b426c9c0637872267ab31ea1fc465d2ef72bb6e308d65355e74ee5cb557487" exitCode=0 Jan 22 01:04:30 crc kubenswrapper[4829]: I0122 01:04:30.228145 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4lcfd" event={"ID":"f3e85d11-53c4-453c-894c-04ccc1caaa94","Type":"ContainerDied","Data":"45b426c9c0637872267ab31ea1fc465d2ef72bb6e308d65355e74ee5cb557487"} Jan 22 01:04:30 crc kubenswrapper[4829]: I0122 01:04:30.231307 4829 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 01:04:31 crc kubenswrapper[4829]: I0122 01:04:31.239497 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4lcfd" event={"ID":"f3e85d11-53c4-453c-894c-04ccc1caaa94","Type":"ContainerStarted","Data":"995d088e30b1089bd0bbfca9ce80d594822aea5dcad5752f725d749c01a091a9"} Jan 22 01:04:32 crc kubenswrapper[4829]: I0122 01:04:32.254329 4829 generic.go:334] "Generic (PLEG): container finished" podID="f3e85d11-53c4-453c-894c-04ccc1caaa94" containerID="995d088e30b1089bd0bbfca9ce80d594822aea5dcad5752f725d749c01a091a9" exitCode=0 Jan 22 01:04:32 crc kubenswrapper[4829]: I0122 01:04:32.254390 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4lcfd" event={"ID":"f3e85d11-53c4-453c-894c-04ccc1caaa94","Type":"ContainerDied","Data":"995d088e30b1089bd0bbfca9ce80d594822aea5dcad5752f725d749c01a091a9"} Jan 22 01:04:33 crc kubenswrapper[4829]: I0122 01:04:33.267414 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4lcfd" event={"ID":"f3e85d11-53c4-453c-894c-04ccc1caaa94","Type":"ContainerStarted","Data":"39fe75d2463a5a85274c74c0138ec2177bc0756e5bdb534eacacc116340872ce"} Jan 22 01:04:33 crc kubenswrapper[4829]: I0122 01:04:33.293433 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4lcfd" podStartSLOduration=2.845406312 podStartE2EDuration="5.293415828s" podCreationTimestamp="2026-01-22 01:04:28 +0000 UTC" firstStartedPulling="2026-01-22 01:04:30.230526734 +0000 UTC m=+3448.266768686" lastFinishedPulling="2026-01-22 01:04:32.67853626 +0000 UTC m=+3450.714778202" observedRunningTime="2026-01-22 01:04:33.292105817 +0000 UTC m=+3451.328347739" watchObservedRunningTime="2026-01-22 01:04:33.293415828 +0000 UTC m=+3451.329657750" Jan 22 01:04:34 crc kubenswrapper[4829]: I0122 01:04:34.658501 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 01:04:34 crc 
kubenswrapper[4829]: I0122 01:04:34.658593 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 01:04:34 crc kubenswrapper[4829]: I0122 01:04:34.658653 4829 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 01:04:34 crc kubenswrapper[4829]: I0122 01:04:34.659343 4829 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1a18f67d780b8159297dbb4fe83b23a2b1970de46ebbe229f30e582bf2037a16"} pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 01:04:34 crc kubenswrapper[4829]: I0122 01:04:34.659407 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" containerID="cri-o://1a18f67d780b8159297dbb4fe83b23a2b1970de46ebbe229f30e582bf2037a16" gracePeriod=600 Jan 22 01:04:35 crc kubenswrapper[4829]: I0122 01:04:35.288094 4829 generic.go:334] "Generic (PLEG): container finished" podID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerID="1a18f67d780b8159297dbb4fe83b23a2b1970de46ebbe229f30e582bf2037a16" exitCode=0 Jan 22 01:04:35 crc kubenswrapper[4829]: I0122 01:04:35.288170 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerDied","Data":"1a18f67d780b8159297dbb4fe83b23a2b1970de46ebbe229f30e582bf2037a16"} Jan 22 01:04:35 crc kubenswrapper[4829]: I0122 01:04:35.288599 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerStarted","Data":"013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6"} Jan 22 01:04:35 crc kubenswrapper[4829]: I0122 01:04:35.288646 4829 scope.go:117] "RemoveContainer" containerID="6c27fd20e5cb3ec717128d3b87bc07533712db460a098243d505d59c1a5b010f" Jan 22 01:04:38 crc kubenswrapper[4829]: I0122 01:04:38.853206 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4lcfd" Jan 22 01:04:38 crc kubenswrapper[4829]: I0122 01:04:38.854024 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4lcfd" Jan 22 01:04:38 crc kubenswrapper[4829]: I0122 01:04:38.930148 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4lcfd" Jan 22 01:04:39 crc kubenswrapper[4829]: I0122 01:04:39.398820 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4lcfd" Jan 22 01:04:39 crc kubenswrapper[4829]: I0122 01:04:39.468934 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4lcfd"] Jan 22 01:04:41 crc kubenswrapper[4829]: I0122 01:04:41.351079 4829 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openshift-marketplace/community-operators-4lcfd" podUID="f3e85d11-53c4-453c-894c-04ccc1caaa94" containerName="registry-server" containerID="cri-o://39fe75d2463a5a85274c74c0138ec2177bc0756e5bdb534eacacc116340872ce" gracePeriod=2 Jan 22 01:04:42 crc kubenswrapper[4829]: I0122 01:04:42.253706 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4lcfd" Jan 22 01:04:42 crc kubenswrapper[4829]: I0122 01:04:42.303808 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3e85d11-53c4-453c-894c-04ccc1caaa94-utilities\") pod \"f3e85d11-53c4-453c-894c-04ccc1caaa94\" (UID: \"f3e85d11-53c4-453c-894c-04ccc1caaa94\") " Jan 22 01:04:42 crc kubenswrapper[4829]: I0122 01:04:42.303925 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3e85d11-53c4-453c-894c-04ccc1caaa94-catalog-content\") pod \"f3e85d11-53c4-453c-894c-04ccc1caaa94\" (UID: \"f3e85d11-53c4-453c-894c-04ccc1caaa94\") " Jan 22 01:04:42 crc kubenswrapper[4829]: I0122 01:04:42.304029 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mgxhm\" (UniqueName: \"kubernetes.io/projected/f3e85d11-53c4-453c-894c-04ccc1caaa94-kube-api-access-mgxhm\") pod \"f3e85d11-53c4-453c-894c-04ccc1caaa94\" (UID: \"f3e85d11-53c4-453c-894c-04ccc1caaa94\") " Jan 22 01:04:42 crc kubenswrapper[4829]: I0122 01:04:42.305405 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3e85d11-53c4-453c-894c-04ccc1caaa94-utilities" (OuterVolumeSpecName: "utilities") pod "f3e85d11-53c4-453c-894c-04ccc1caaa94" (UID: "f3e85d11-53c4-453c-894c-04ccc1caaa94"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 01:04:42 crc kubenswrapper[4829]: I0122 01:04:42.311671 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3e85d11-53c4-453c-894c-04ccc1caaa94-kube-api-access-mgxhm" (OuterVolumeSpecName: "kube-api-access-mgxhm") pod "f3e85d11-53c4-453c-894c-04ccc1caaa94" (UID: "f3e85d11-53c4-453c-894c-04ccc1caaa94"). InnerVolumeSpecName "kube-api-access-mgxhm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 01:04:42 crc kubenswrapper[4829]: I0122 01:04:42.362811 4829 generic.go:334] "Generic (PLEG): container finished" podID="f3e85d11-53c4-453c-894c-04ccc1caaa94" containerID="39fe75d2463a5a85274c74c0138ec2177bc0756e5bdb534eacacc116340872ce" exitCode=0 Jan 22 01:04:42 crc kubenswrapper[4829]: I0122 01:04:42.362869 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4lcfd" event={"ID":"f3e85d11-53c4-453c-894c-04ccc1caaa94","Type":"ContainerDied","Data":"39fe75d2463a5a85274c74c0138ec2177bc0756e5bdb534eacacc116340872ce"} Jan 22 01:04:42 crc kubenswrapper[4829]: I0122 01:04:42.362927 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4lcfd" event={"ID":"f3e85d11-53c4-453c-894c-04ccc1caaa94","Type":"ContainerDied","Data":"3e9af1e366b66fbdf5f1e0f1e71f00d048e531be8fad11932ba31a185bbc6edf"} Jan 22 01:04:42 crc kubenswrapper[4829]: I0122 01:04:42.362923 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4lcfd" Jan 22 01:04:42 crc kubenswrapper[4829]: I0122 01:04:42.362950 4829 scope.go:117] "RemoveContainer" containerID="39fe75d2463a5a85274c74c0138ec2177bc0756e5bdb534eacacc116340872ce" Jan 22 01:04:42 crc kubenswrapper[4829]: I0122 01:04:42.365707 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3e85d11-53c4-453c-894c-04ccc1caaa94-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f3e85d11-53c4-453c-894c-04ccc1caaa94" (UID: "f3e85d11-53c4-453c-894c-04ccc1caaa94"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 01:04:42 crc kubenswrapper[4829]: I0122 01:04:42.389521 4829 scope.go:117] "RemoveContainer" containerID="995d088e30b1089bd0bbfca9ce80d594822aea5dcad5752f725d749c01a091a9" Jan 22 01:04:42 crc kubenswrapper[4829]: I0122 01:04:42.405247 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f3e85d11-53c4-453c-894c-04ccc1caaa94-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 01:04:42 crc kubenswrapper[4829]: I0122 01:04:42.405275 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f3e85d11-53c4-453c-894c-04ccc1caaa94-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 01:04:42 crc kubenswrapper[4829]: I0122 01:04:42.405286 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mgxhm\" (UniqueName: \"kubernetes.io/projected/f3e85d11-53c4-453c-894c-04ccc1caaa94-kube-api-access-mgxhm\") on node \"crc\" DevicePath \"\"" Jan 22 01:04:42 crc kubenswrapper[4829]: I0122 01:04:42.413096 4829 scope.go:117] "RemoveContainer" containerID="45b426c9c0637872267ab31ea1fc465d2ef72bb6e308d65355e74ee5cb557487" Jan 22 01:04:42 crc kubenswrapper[4829]: I0122 01:04:42.441664 4829 scope.go:117] "RemoveContainer" containerID="39fe75d2463a5a85274c74c0138ec2177bc0756e5bdb534eacacc116340872ce" Jan 22 01:04:42 crc kubenswrapper[4829]: E0122 01:04:42.442204 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39fe75d2463a5a85274c74c0138ec2177bc0756e5bdb534eacacc116340872ce\": container with ID starting with 39fe75d2463a5a85274c74c0138ec2177bc0756e5bdb534eacacc116340872ce not found: ID does not exist" containerID="39fe75d2463a5a85274c74c0138ec2177bc0756e5bdb534eacacc116340872ce" Jan 22 01:04:42 crc kubenswrapper[4829]: I0122 01:04:42.442232 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39fe75d2463a5a85274c74c0138ec2177bc0756e5bdb534eacacc116340872ce"} err="failed to get container status \"39fe75d2463a5a85274c74c0138ec2177bc0756e5bdb534eacacc116340872ce\": rpc error: code = NotFound desc = could not find container \"39fe75d2463a5a85274c74c0138ec2177bc0756e5bdb534eacacc116340872ce\": container with ID starting with 39fe75d2463a5a85274c74c0138ec2177bc0756e5bdb534eacacc116340872ce not found: ID does not exist" Jan 22 01:04:42 crc kubenswrapper[4829]: I0122 01:04:42.442253 4829 scope.go:117] "RemoveContainer" containerID="995d088e30b1089bd0bbfca9ce80d594822aea5dcad5752f725d749c01a091a9" Jan 22 01:04:42 crc kubenswrapper[4829]: E0122 01:04:42.443049 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"995d088e30b1089bd0bbfca9ce80d594822aea5dcad5752f725d749c01a091a9\": 
container with ID starting with 995d088e30b1089bd0bbfca9ce80d594822aea5dcad5752f725d749c01a091a9 not found: ID does not exist" containerID="995d088e30b1089bd0bbfca9ce80d594822aea5dcad5752f725d749c01a091a9" Jan 22 01:04:42 crc kubenswrapper[4829]: I0122 01:04:42.443086 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"995d088e30b1089bd0bbfca9ce80d594822aea5dcad5752f725d749c01a091a9"} err="failed to get container status \"995d088e30b1089bd0bbfca9ce80d594822aea5dcad5752f725d749c01a091a9\": rpc error: code = NotFound desc = could not find container \"995d088e30b1089bd0bbfca9ce80d594822aea5dcad5752f725d749c01a091a9\": container with ID starting with 995d088e30b1089bd0bbfca9ce80d594822aea5dcad5752f725d749c01a091a9 not found: ID does not exist" Jan 22 01:04:42 crc kubenswrapper[4829]: I0122 01:04:42.443099 4829 scope.go:117] "RemoveContainer" containerID="45b426c9c0637872267ab31ea1fc465d2ef72bb6e308d65355e74ee5cb557487" Jan 22 01:04:42 crc kubenswrapper[4829]: E0122 01:04:42.443610 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45b426c9c0637872267ab31ea1fc465d2ef72bb6e308d65355e74ee5cb557487\": container with ID starting with 45b426c9c0637872267ab31ea1fc465d2ef72bb6e308d65355e74ee5cb557487 not found: ID does not exist" containerID="45b426c9c0637872267ab31ea1fc465d2ef72bb6e308d65355e74ee5cb557487" Jan 22 01:04:42 crc kubenswrapper[4829]: I0122 01:04:42.443633 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45b426c9c0637872267ab31ea1fc465d2ef72bb6e308d65355e74ee5cb557487"} err="failed to get container status \"45b426c9c0637872267ab31ea1fc465d2ef72bb6e308d65355e74ee5cb557487\": rpc error: code = NotFound desc = could not find container \"45b426c9c0637872267ab31ea1fc465d2ef72bb6e308d65355e74ee5cb557487\": container with ID starting with 45b426c9c0637872267ab31ea1fc465d2ef72bb6e308d65355e74ee5cb557487 not found: ID does not exist" Jan 22 01:04:42 crc kubenswrapper[4829]: I0122 01:04:42.698048 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4lcfd"] Jan 22 01:04:42 crc kubenswrapper[4829]: I0122 01:04:42.708695 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4lcfd"] Jan 22 01:04:44 crc kubenswrapper[4829]: I0122 01:04:44.567909 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3e85d11-53c4-453c-894c-04ccc1caaa94" path="/var/lib/kubelet/pods/f3e85d11-53c4-453c-894c-04ccc1caaa94/volumes" Jan 22 01:04:55 crc kubenswrapper[4829]: I0122 01:04:55.269145 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-s5xs5"] Jan 22 01:04:55 crc kubenswrapper[4829]: E0122 01:04:55.270686 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3e85d11-53c4-453c-894c-04ccc1caaa94" containerName="extract-content" Jan 22 01:04:55 crc kubenswrapper[4829]: I0122 01:04:55.270711 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3e85d11-53c4-453c-894c-04ccc1caaa94" containerName="extract-content" Jan 22 01:04:55 crc kubenswrapper[4829]: E0122 01:04:55.270743 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3e85d11-53c4-453c-894c-04ccc1caaa94" containerName="extract-utilities" Jan 22 01:04:55 crc kubenswrapper[4829]: I0122 01:04:55.270754 4829 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="f3e85d11-53c4-453c-894c-04ccc1caaa94" containerName="extract-utilities" Jan 22 01:04:55 crc kubenswrapper[4829]: E0122 01:04:55.270775 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3e85d11-53c4-453c-894c-04ccc1caaa94" containerName="registry-server" Jan 22 01:04:55 crc kubenswrapper[4829]: I0122 01:04:55.270786 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3e85d11-53c4-453c-894c-04ccc1caaa94" containerName="registry-server" Jan 22 01:04:55 crc kubenswrapper[4829]: I0122 01:04:55.270975 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3e85d11-53c4-453c-894c-04ccc1caaa94" containerName="registry-server" Jan 22 01:04:55 crc kubenswrapper[4829]: I0122 01:04:55.272431 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s5xs5" Jan 22 01:04:55 crc kubenswrapper[4829]: I0122 01:04:55.294819 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s5xs5"] Jan 22 01:04:55 crc kubenswrapper[4829]: I0122 01:04:55.422805 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/205f187d-3f3d-4b9e-bdbf-83e8037b56b0-catalog-content\") pod \"redhat-operators-s5xs5\" (UID: \"205f187d-3f3d-4b9e-bdbf-83e8037b56b0\") " pod="openshift-marketplace/redhat-operators-s5xs5" Jan 22 01:04:55 crc kubenswrapper[4829]: I0122 01:04:55.423041 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/205f187d-3f3d-4b9e-bdbf-83e8037b56b0-utilities\") pod \"redhat-operators-s5xs5\" (UID: \"205f187d-3f3d-4b9e-bdbf-83e8037b56b0\") " pod="openshift-marketplace/redhat-operators-s5xs5" Jan 22 01:04:55 crc kubenswrapper[4829]: I0122 01:04:55.423113 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6c6fz\" (UniqueName: \"kubernetes.io/projected/205f187d-3f3d-4b9e-bdbf-83e8037b56b0-kube-api-access-6c6fz\") pod \"redhat-operators-s5xs5\" (UID: \"205f187d-3f3d-4b9e-bdbf-83e8037b56b0\") " pod="openshift-marketplace/redhat-operators-s5xs5" Jan 22 01:04:55 crc kubenswrapper[4829]: I0122 01:04:55.524846 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/205f187d-3f3d-4b9e-bdbf-83e8037b56b0-catalog-content\") pod \"redhat-operators-s5xs5\" (UID: \"205f187d-3f3d-4b9e-bdbf-83e8037b56b0\") " pod="openshift-marketplace/redhat-operators-s5xs5" Jan 22 01:04:55 crc kubenswrapper[4829]: I0122 01:04:55.524976 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/205f187d-3f3d-4b9e-bdbf-83e8037b56b0-utilities\") pod \"redhat-operators-s5xs5\" (UID: \"205f187d-3f3d-4b9e-bdbf-83e8037b56b0\") " pod="openshift-marketplace/redhat-operators-s5xs5" Jan 22 01:04:55 crc kubenswrapper[4829]: I0122 01:04:55.525016 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6c6fz\" (UniqueName: \"kubernetes.io/projected/205f187d-3f3d-4b9e-bdbf-83e8037b56b0-kube-api-access-6c6fz\") pod \"redhat-operators-s5xs5\" (UID: \"205f187d-3f3d-4b9e-bdbf-83e8037b56b0\") " pod="openshift-marketplace/redhat-operators-s5xs5" Jan 22 01:04:55 crc kubenswrapper[4829]: I0122 01:04:55.525353 4829 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/205f187d-3f3d-4b9e-bdbf-83e8037b56b0-catalog-content\") pod \"redhat-operators-s5xs5\" (UID: \"205f187d-3f3d-4b9e-bdbf-83e8037b56b0\") " pod="openshift-marketplace/redhat-operators-s5xs5" Jan 22 01:04:55 crc kubenswrapper[4829]: I0122 01:04:55.525361 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/205f187d-3f3d-4b9e-bdbf-83e8037b56b0-utilities\") pod \"redhat-operators-s5xs5\" (UID: \"205f187d-3f3d-4b9e-bdbf-83e8037b56b0\") " pod="openshift-marketplace/redhat-operators-s5xs5" Jan 22 01:04:55 crc kubenswrapper[4829]: I0122 01:04:55.546188 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6c6fz\" (UniqueName: \"kubernetes.io/projected/205f187d-3f3d-4b9e-bdbf-83e8037b56b0-kube-api-access-6c6fz\") pod \"redhat-operators-s5xs5\" (UID: \"205f187d-3f3d-4b9e-bdbf-83e8037b56b0\") " pod="openshift-marketplace/redhat-operators-s5xs5" Jan 22 01:04:55 crc kubenswrapper[4829]: I0122 01:04:55.595603 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s5xs5" Jan 22 01:04:55 crc kubenswrapper[4829]: I0122 01:04:55.887071 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s5xs5"] Jan 22 01:04:56 crc kubenswrapper[4829]: I0122 01:04:56.516053 4829 generic.go:334] "Generic (PLEG): container finished" podID="205f187d-3f3d-4b9e-bdbf-83e8037b56b0" containerID="dd792b51da864b34b83a0f1e647704ad4966bd1f9adea34ac8f6ff837920350f" exitCode=0 Jan 22 01:04:56 crc kubenswrapper[4829]: I0122 01:04:56.520588 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s5xs5" event={"ID":"205f187d-3f3d-4b9e-bdbf-83e8037b56b0","Type":"ContainerDied","Data":"dd792b51da864b34b83a0f1e647704ad4966bd1f9adea34ac8f6ff837920350f"} Jan 22 01:04:56 crc kubenswrapper[4829]: I0122 01:04:56.520663 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s5xs5" event={"ID":"205f187d-3f3d-4b9e-bdbf-83e8037b56b0","Type":"ContainerStarted","Data":"e0d39b51b32d04f6535058c5d0078756ef3f143c6995997e4d87cf308b10c929"} Jan 22 01:04:57 crc kubenswrapper[4829]: I0122 01:04:57.528338 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s5xs5" event={"ID":"205f187d-3f3d-4b9e-bdbf-83e8037b56b0","Type":"ContainerStarted","Data":"a95f3bb920b998246ac8de3023a00fd4a6491e4841bea73d45f12480586f9c10"} Jan 22 01:04:58 crc kubenswrapper[4829]: I0122 01:04:58.542820 4829 generic.go:334] "Generic (PLEG): container finished" podID="205f187d-3f3d-4b9e-bdbf-83e8037b56b0" containerID="a95f3bb920b998246ac8de3023a00fd4a6491e4841bea73d45f12480586f9c10" exitCode=0 Jan 22 01:04:58 crc kubenswrapper[4829]: I0122 01:04:58.542941 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s5xs5" event={"ID":"205f187d-3f3d-4b9e-bdbf-83e8037b56b0","Type":"ContainerDied","Data":"a95f3bb920b998246ac8de3023a00fd4a6491e4841bea73d45f12480586f9c10"} Jan 22 01:04:59 crc kubenswrapper[4829]: I0122 01:04:59.570219 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s5xs5" event={"ID":"205f187d-3f3d-4b9e-bdbf-83e8037b56b0","Type":"ContainerStarted","Data":"33a5853f6b543b928cb770d84f3f22e33a984faae6efdd038cb5c8d8b5cfc037"} Jan 22 01:04:59 crc 
kubenswrapper[4829]: I0122 01:04:59.600010 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-s5xs5" podStartSLOduration=2.191816435 podStartE2EDuration="4.599978748s" podCreationTimestamp="2026-01-22 01:04:55 +0000 UTC" firstStartedPulling="2026-01-22 01:04:56.52077299 +0000 UTC m=+3474.557014902" lastFinishedPulling="2026-01-22 01:04:58.928935273 +0000 UTC m=+3476.965177215" observedRunningTime="2026-01-22 01:04:59.589505604 +0000 UTC m=+3477.625747576" watchObservedRunningTime="2026-01-22 01:04:59.599978748 +0000 UTC m=+3477.636220700" Jan 22 01:05:05 crc kubenswrapper[4829]: I0122 01:05:05.596810 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-s5xs5" Jan 22 01:05:05 crc kubenswrapper[4829]: I0122 01:05:05.598179 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-s5xs5" Jan 22 01:05:06 crc kubenswrapper[4829]: I0122 01:05:06.681467 4829 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-s5xs5" podUID="205f187d-3f3d-4b9e-bdbf-83e8037b56b0" containerName="registry-server" probeResult="failure" output=< Jan 22 01:05:06 crc kubenswrapper[4829]: timeout: failed to connect service ":50051" within 1s Jan 22 01:05:06 crc kubenswrapper[4829]: > Jan 22 01:05:15 crc kubenswrapper[4829]: I0122 01:05:15.647773 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-s5xs5" Jan 22 01:05:15 crc kubenswrapper[4829]: I0122 01:05:15.704572 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-s5xs5" Jan 22 01:05:15 crc kubenswrapper[4829]: I0122 01:05:15.898955 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-s5xs5"] Jan 22 01:05:16 crc kubenswrapper[4829]: I0122 01:05:16.734773 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-s5xs5" podUID="205f187d-3f3d-4b9e-bdbf-83e8037b56b0" containerName="registry-server" containerID="cri-o://33a5853f6b543b928cb770d84f3f22e33a984faae6efdd038cb5c8d8b5cfc037" gracePeriod=2 Jan 22 01:05:17 crc kubenswrapper[4829]: I0122 01:05:17.124859 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-s5xs5" Jan 22 01:05:17 crc kubenswrapper[4829]: I0122 01:05:17.186886 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/205f187d-3f3d-4b9e-bdbf-83e8037b56b0-catalog-content\") pod \"205f187d-3f3d-4b9e-bdbf-83e8037b56b0\" (UID: \"205f187d-3f3d-4b9e-bdbf-83e8037b56b0\") " Jan 22 01:05:17 crc kubenswrapper[4829]: I0122 01:05:17.187040 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6c6fz\" (UniqueName: \"kubernetes.io/projected/205f187d-3f3d-4b9e-bdbf-83e8037b56b0-kube-api-access-6c6fz\") pod \"205f187d-3f3d-4b9e-bdbf-83e8037b56b0\" (UID: \"205f187d-3f3d-4b9e-bdbf-83e8037b56b0\") " Jan 22 01:05:17 crc kubenswrapper[4829]: I0122 01:05:17.187117 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/205f187d-3f3d-4b9e-bdbf-83e8037b56b0-utilities\") pod \"205f187d-3f3d-4b9e-bdbf-83e8037b56b0\" (UID: \"205f187d-3f3d-4b9e-bdbf-83e8037b56b0\") " Jan 22 01:05:17 crc kubenswrapper[4829]: I0122 01:05:17.188369 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/205f187d-3f3d-4b9e-bdbf-83e8037b56b0-utilities" (OuterVolumeSpecName: "utilities") pod "205f187d-3f3d-4b9e-bdbf-83e8037b56b0" (UID: "205f187d-3f3d-4b9e-bdbf-83e8037b56b0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 01:05:17 crc kubenswrapper[4829]: I0122 01:05:17.194528 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/205f187d-3f3d-4b9e-bdbf-83e8037b56b0-kube-api-access-6c6fz" (OuterVolumeSpecName: "kube-api-access-6c6fz") pod "205f187d-3f3d-4b9e-bdbf-83e8037b56b0" (UID: "205f187d-3f3d-4b9e-bdbf-83e8037b56b0"). InnerVolumeSpecName "kube-api-access-6c6fz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 01:05:17 crc kubenswrapper[4829]: I0122 01:05:17.289266 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6c6fz\" (UniqueName: \"kubernetes.io/projected/205f187d-3f3d-4b9e-bdbf-83e8037b56b0-kube-api-access-6c6fz\") on node \"crc\" DevicePath \"\"" Jan 22 01:05:17 crc kubenswrapper[4829]: I0122 01:05:17.289331 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/205f187d-3f3d-4b9e-bdbf-83e8037b56b0-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 01:05:17 crc kubenswrapper[4829]: I0122 01:05:17.349244 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/205f187d-3f3d-4b9e-bdbf-83e8037b56b0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "205f187d-3f3d-4b9e-bdbf-83e8037b56b0" (UID: "205f187d-3f3d-4b9e-bdbf-83e8037b56b0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 01:05:17 crc kubenswrapper[4829]: I0122 01:05:17.390940 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/205f187d-3f3d-4b9e-bdbf-83e8037b56b0-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 01:05:17 crc kubenswrapper[4829]: I0122 01:05:17.749892 4829 generic.go:334] "Generic (PLEG): container finished" podID="205f187d-3f3d-4b9e-bdbf-83e8037b56b0" containerID="33a5853f6b543b928cb770d84f3f22e33a984faae6efdd038cb5c8d8b5cfc037" exitCode=0 Jan 22 01:05:17 crc kubenswrapper[4829]: I0122 01:05:17.749943 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s5xs5" event={"ID":"205f187d-3f3d-4b9e-bdbf-83e8037b56b0","Type":"ContainerDied","Data":"33a5853f6b543b928cb770d84f3f22e33a984faae6efdd038cb5c8d8b5cfc037"} Jan 22 01:05:17 crc kubenswrapper[4829]: I0122 01:05:17.749990 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s5xs5" Jan 22 01:05:17 crc kubenswrapper[4829]: I0122 01:05:17.750017 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s5xs5" event={"ID":"205f187d-3f3d-4b9e-bdbf-83e8037b56b0","Type":"ContainerDied","Data":"e0d39b51b32d04f6535058c5d0078756ef3f143c6995997e4d87cf308b10c929"} Jan 22 01:05:17 crc kubenswrapper[4829]: I0122 01:05:17.750051 4829 scope.go:117] "RemoveContainer" containerID="33a5853f6b543b928cb770d84f3f22e33a984faae6efdd038cb5c8d8b5cfc037" Jan 22 01:05:17 crc kubenswrapper[4829]: I0122 01:05:17.785926 4829 scope.go:117] "RemoveContainer" containerID="a95f3bb920b998246ac8de3023a00fd4a6491e4841bea73d45f12480586f9c10" Jan 22 01:05:17 crc kubenswrapper[4829]: I0122 01:05:17.805197 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-s5xs5"] Jan 22 01:05:17 crc kubenswrapper[4829]: I0122 01:05:17.826514 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-s5xs5"] Jan 22 01:05:17 crc kubenswrapper[4829]: I0122 01:05:17.833492 4829 scope.go:117] "RemoveContainer" containerID="dd792b51da864b34b83a0f1e647704ad4966bd1f9adea34ac8f6ff837920350f" Jan 22 01:05:17 crc kubenswrapper[4829]: I0122 01:05:17.862043 4829 scope.go:117] "RemoveContainer" containerID="33a5853f6b543b928cb770d84f3f22e33a984faae6efdd038cb5c8d8b5cfc037" Jan 22 01:05:17 crc kubenswrapper[4829]: E0122 01:05:17.862589 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33a5853f6b543b928cb770d84f3f22e33a984faae6efdd038cb5c8d8b5cfc037\": container with ID starting with 33a5853f6b543b928cb770d84f3f22e33a984faae6efdd038cb5c8d8b5cfc037 not found: ID does not exist" containerID="33a5853f6b543b928cb770d84f3f22e33a984faae6efdd038cb5c8d8b5cfc037" Jan 22 01:05:17 crc kubenswrapper[4829]: I0122 01:05:17.862650 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33a5853f6b543b928cb770d84f3f22e33a984faae6efdd038cb5c8d8b5cfc037"} err="failed to get container status \"33a5853f6b543b928cb770d84f3f22e33a984faae6efdd038cb5c8d8b5cfc037\": rpc error: code = NotFound desc = could not find container \"33a5853f6b543b928cb770d84f3f22e33a984faae6efdd038cb5c8d8b5cfc037\": container with ID starting with 33a5853f6b543b928cb770d84f3f22e33a984faae6efdd038cb5c8d8b5cfc037 not found: ID does not exist" Jan 22 01:05:17 crc 
kubenswrapper[4829]: I0122 01:05:17.862691 4829 scope.go:117] "RemoveContainer" containerID="a95f3bb920b998246ac8de3023a00fd4a6491e4841bea73d45f12480586f9c10" Jan 22 01:05:17 crc kubenswrapper[4829]: E0122 01:05:17.863378 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a95f3bb920b998246ac8de3023a00fd4a6491e4841bea73d45f12480586f9c10\": container with ID starting with a95f3bb920b998246ac8de3023a00fd4a6491e4841bea73d45f12480586f9c10 not found: ID does not exist" containerID="a95f3bb920b998246ac8de3023a00fd4a6491e4841bea73d45f12480586f9c10" Jan 22 01:05:17 crc kubenswrapper[4829]: I0122 01:05:17.863420 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a95f3bb920b998246ac8de3023a00fd4a6491e4841bea73d45f12480586f9c10"} err="failed to get container status \"a95f3bb920b998246ac8de3023a00fd4a6491e4841bea73d45f12480586f9c10\": rpc error: code = NotFound desc = could not find container \"a95f3bb920b998246ac8de3023a00fd4a6491e4841bea73d45f12480586f9c10\": container with ID starting with a95f3bb920b998246ac8de3023a00fd4a6491e4841bea73d45f12480586f9c10 not found: ID does not exist" Jan 22 01:05:17 crc kubenswrapper[4829]: I0122 01:05:17.863446 4829 scope.go:117] "RemoveContainer" containerID="dd792b51da864b34b83a0f1e647704ad4966bd1f9adea34ac8f6ff837920350f" Jan 22 01:05:17 crc kubenswrapper[4829]: E0122 01:05:17.863806 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd792b51da864b34b83a0f1e647704ad4966bd1f9adea34ac8f6ff837920350f\": container with ID starting with dd792b51da864b34b83a0f1e647704ad4966bd1f9adea34ac8f6ff837920350f not found: ID does not exist" containerID="dd792b51da864b34b83a0f1e647704ad4966bd1f9adea34ac8f6ff837920350f" Jan 22 01:05:17 crc kubenswrapper[4829]: I0122 01:05:17.863863 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd792b51da864b34b83a0f1e647704ad4966bd1f9adea34ac8f6ff837920350f"} err="failed to get container status \"dd792b51da864b34b83a0f1e647704ad4966bd1f9adea34ac8f6ff837920350f\": rpc error: code = NotFound desc = could not find container \"dd792b51da864b34b83a0f1e647704ad4966bd1f9adea34ac8f6ff837920350f\": container with ID starting with dd792b51da864b34b83a0f1e647704ad4966bd1f9adea34ac8f6ff837920350f not found: ID does not exist" Jan 22 01:05:18 crc kubenswrapper[4829]: I0122 01:05:18.567384 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="205f187d-3f3d-4b9e-bdbf-83e8037b56b0" path="/var/lib/kubelet/pods/205f187d-3f3d-4b9e-bdbf-83e8037b56b0/volumes" Jan 22 01:05:22 crc kubenswrapper[4829]: I0122 01:05:22.346242 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-g4zhv"] Jan 22 01:05:22 crc kubenswrapper[4829]: E0122 01:05:22.347057 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="205f187d-3f3d-4b9e-bdbf-83e8037b56b0" containerName="registry-server" Jan 22 01:05:22 crc kubenswrapper[4829]: I0122 01:05:22.347082 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="205f187d-3f3d-4b9e-bdbf-83e8037b56b0" containerName="registry-server" Jan 22 01:05:22 crc kubenswrapper[4829]: E0122 01:05:22.347121 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="205f187d-3f3d-4b9e-bdbf-83e8037b56b0" containerName="extract-content" Jan 22 01:05:22 crc kubenswrapper[4829]: I0122 01:05:22.347132 4829 
state_mem.go:107] "Deleted CPUSet assignment" podUID="205f187d-3f3d-4b9e-bdbf-83e8037b56b0" containerName="extract-content" Jan 22 01:05:22 crc kubenswrapper[4829]: E0122 01:05:22.347156 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="205f187d-3f3d-4b9e-bdbf-83e8037b56b0" containerName="extract-utilities" Jan 22 01:05:22 crc kubenswrapper[4829]: I0122 01:05:22.347170 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="205f187d-3f3d-4b9e-bdbf-83e8037b56b0" containerName="extract-utilities" Jan 22 01:05:22 crc kubenswrapper[4829]: I0122 01:05:22.347434 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="205f187d-3f3d-4b9e-bdbf-83e8037b56b0" containerName="registry-server" Jan 22 01:05:22 crc kubenswrapper[4829]: I0122 01:05:22.349116 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g4zhv" Jan 22 01:05:22 crc kubenswrapper[4829]: I0122 01:05:22.369031 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g4zhv"] Jan 22 01:05:22 crc kubenswrapper[4829]: I0122 01:05:22.523528 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6425f247-5696-4f31-9603-5b3ea14bfb73-utilities\") pod \"certified-operators-g4zhv\" (UID: \"6425f247-5696-4f31-9603-5b3ea14bfb73\") " pod="openshift-marketplace/certified-operators-g4zhv" Jan 22 01:05:22 crc kubenswrapper[4829]: I0122 01:05:22.523883 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjq9x\" (UniqueName: \"kubernetes.io/projected/6425f247-5696-4f31-9603-5b3ea14bfb73-kube-api-access-xjq9x\") pod \"certified-operators-g4zhv\" (UID: \"6425f247-5696-4f31-9603-5b3ea14bfb73\") " pod="openshift-marketplace/certified-operators-g4zhv" Jan 22 01:05:22 crc kubenswrapper[4829]: I0122 01:05:22.523947 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6425f247-5696-4f31-9603-5b3ea14bfb73-catalog-content\") pod \"certified-operators-g4zhv\" (UID: \"6425f247-5696-4f31-9603-5b3ea14bfb73\") " pod="openshift-marketplace/certified-operators-g4zhv" Jan 22 01:05:22 crc kubenswrapper[4829]: I0122 01:05:22.625529 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjq9x\" (UniqueName: \"kubernetes.io/projected/6425f247-5696-4f31-9603-5b3ea14bfb73-kube-api-access-xjq9x\") pod \"certified-operators-g4zhv\" (UID: \"6425f247-5696-4f31-9603-5b3ea14bfb73\") " pod="openshift-marketplace/certified-operators-g4zhv" Jan 22 01:05:22 crc kubenswrapper[4829]: I0122 01:05:22.625608 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6425f247-5696-4f31-9603-5b3ea14bfb73-catalog-content\") pod \"certified-operators-g4zhv\" (UID: \"6425f247-5696-4f31-9603-5b3ea14bfb73\") " pod="openshift-marketplace/certified-operators-g4zhv" Jan 22 01:05:22 crc kubenswrapper[4829]: I0122 01:05:22.625720 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6425f247-5696-4f31-9603-5b3ea14bfb73-utilities\") pod \"certified-operators-g4zhv\" (UID: \"6425f247-5696-4f31-9603-5b3ea14bfb73\") " pod="openshift-marketplace/certified-operators-g4zhv" Jan 22 01:05:22 crc 
kubenswrapper[4829]: I0122 01:05:22.626355 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6425f247-5696-4f31-9603-5b3ea14bfb73-catalog-content\") pod \"certified-operators-g4zhv\" (UID: \"6425f247-5696-4f31-9603-5b3ea14bfb73\") " pod="openshift-marketplace/certified-operators-g4zhv" Jan 22 01:05:22 crc kubenswrapper[4829]: I0122 01:05:22.626403 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6425f247-5696-4f31-9603-5b3ea14bfb73-utilities\") pod \"certified-operators-g4zhv\" (UID: \"6425f247-5696-4f31-9603-5b3ea14bfb73\") " pod="openshift-marketplace/certified-operators-g4zhv" Jan 22 01:05:22 crc kubenswrapper[4829]: I0122 01:05:22.654104 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjq9x\" (UniqueName: \"kubernetes.io/projected/6425f247-5696-4f31-9603-5b3ea14bfb73-kube-api-access-xjq9x\") pod \"certified-operators-g4zhv\" (UID: \"6425f247-5696-4f31-9603-5b3ea14bfb73\") " pod="openshift-marketplace/certified-operators-g4zhv" Jan 22 01:05:22 crc kubenswrapper[4829]: I0122 01:05:22.669465 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g4zhv" Jan 22 01:05:22 crc kubenswrapper[4829]: I0122 01:05:22.918856 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g4zhv"] Jan 22 01:05:23 crc kubenswrapper[4829]: I0122 01:05:23.835132 4829 generic.go:334] "Generic (PLEG): container finished" podID="6425f247-5696-4f31-9603-5b3ea14bfb73" containerID="6006156c29963fab16c2343e5ec173139dc23429325dcf2cc99b4acbf9efda0e" exitCode=0 Jan 22 01:05:23 crc kubenswrapper[4829]: I0122 01:05:23.835315 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g4zhv" event={"ID":"6425f247-5696-4f31-9603-5b3ea14bfb73","Type":"ContainerDied","Data":"6006156c29963fab16c2343e5ec173139dc23429325dcf2cc99b4acbf9efda0e"} Jan 22 01:05:23 crc kubenswrapper[4829]: I0122 01:05:23.837392 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g4zhv" event={"ID":"6425f247-5696-4f31-9603-5b3ea14bfb73","Type":"ContainerStarted","Data":"d1489e3b553060ce7736f605b84e3cb668a2bd64b657d6d4f5f7d9b79fd1955a"} Jan 22 01:05:24 crc kubenswrapper[4829]: I0122 01:05:24.853186 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g4zhv" event={"ID":"6425f247-5696-4f31-9603-5b3ea14bfb73","Type":"ContainerStarted","Data":"bedd396caa47321240539af9b7db21f0819d48bda34ec1a44fb8dee5b4339d5f"} Jan 22 01:05:25 crc kubenswrapper[4829]: I0122 01:05:25.862940 4829 generic.go:334] "Generic (PLEG): container finished" podID="6425f247-5696-4f31-9603-5b3ea14bfb73" containerID="bedd396caa47321240539af9b7db21f0819d48bda34ec1a44fb8dee5b4339d5f" exitCode=0 Jan 22 01:05:25 crc kubenswrapper[4829]: I0122 01:05:25.862990 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g4zhv" event={"ID":"6425f247-5696-4f31-9603-5b3ea14bfb73","Type":"ContainerDied","Data":"bedd396caa47321240539af9b7db21f0819d48bda34ec1a44fb8dee5b4339d5f"} Jan 22 01:05:26 crc kubenswrapper[4829]: I0122 01:05:26.874358 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g4zhv" 
event={"ID":"6425f247-5696-4f31-9603-5b3ea14bfb73","Type":"ContainerStarted","Data":"3b9a06f56bcd651881e931349d491ea76e7610b4b9d3167627ee9bcb2b4bf1c0"} Jan 22 01:05:26 crc kubenswrapper[4829]: I0122 01:05:26.897703 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-g4zhv" podStartSLOduration=2.4390795020000002 podStartE2EDuration="4.897675735s" podCreationTimestamp="2026-01-22 01:05:22 +0000 UTC" firstStartedPulling="2026-01-22 01:05:23.83763136 +0000 UTC m=+3501.873873302" lastFinishedPulling="2026-01-22 01:05:26.296227603 +0000 UTC m=+3504.332469535" observedRunningTime="2026-01-22 01:05:26.892289169 +0000 UTC m=+3504.928531091" watchObservedRunningTime="2026-01-22 01:05:26.897675735 +0000 UTC m=+3504.933917667" Jan 22 01:05:32 crc kubenswrapper[4829]: I0122 01:05:32.670631 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-g4zhv" Jan 22 01:05:32 crc kubenswrapper[4829]: I0122 01:05:32.671433 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-g4zhv" Jan 22 01:05:32 crc kubenswrapper[4829]: I0122 01:05:32.746095 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-g4zhv" Jan 22 01:05:32 crc kubenswrapper[4829]: I0122 01:05:32.966116 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-g4zhv" Jan 22 01:05:33 crc kubenswrapper[4829]: I0122 01:05:33.014854 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-g4zhv"] Jan 22 01:05:34 crc kubenswrapper[4829]: I0122 01:05:34.946748 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-g4zhv" podUID="6425f247-5696-4f31-9603-5b3ea14bfb73" containerName="registry-server" containerID="cri-o://3b9a06f56bcd651881e931349d491ea76e7610b4b9d3167627ee9bcb2b4bf1c0" gracePeriod=2 Jan 22 01:05:35 crc kubenswrapper[4829]: I0122 01:05:35.836002 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-g4zhv" Jan 22 01:05:35 crc kubenswrapper[4829]: I0122 01:05:35.955447 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6425f247-5696-4f31-9603-5b3ea14bfb73-catalog-content\") pod \"6425f247-5696-4f31-9603-5b3ea14bfb73\" (UID: \"6425f247-5696-4f31-9603-5b3ea14bfb73\") " Jan 22 01:05:35 crc kubenswrapper[4829]: I0122 01:05:35.956396 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6425f247-5696-4f31-9603-5b3ea14bfb73-utilities\") pod \"6425f247-5696-4f31-9603-5b3ea14bfb73\" (UID: \"6425f247-5696-4f31-9603-5b3ea14bfb73\") " Jan 22 01:05:35 crc kubenswrapper[4829]: I0122 01:05:35.956534 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xjq9x\" (UniqueName: \"kubernetes.io/projected/6425f247-5696-4f31-9603-5b3ea14bfb73-kube-api-access-xjq9x\") pod \"6425f247-5696-4f31-9603-5b3ea14bfb73\" (UID: \"6425f247-5696-4f31-9603-5b3ea14bfb73\") " Jan 22 01:05:35 crc kubenswrapper[4829]: I0122 01:05:35.958829 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6425f247-5696-4f31-9603-5b3ea14bfb73-utilities" (OuterVolumeSpecName: "utilities") pod "6425f247-5696-4f31-9603-5b3ea14bfb73" (UID: "6425f247-5696-4f31-9603-5b3ea14bfb73"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 01:05:35 crc kubenswrapper[4829]: I0122 01:05:35.961728 4829 generic.go:334] "Generic (PLEG): container finished" podID="6425f247-5696-4f31-9603-5b3ea14bfb73" containerID="3b9a06f56bcd651881e931349d491ea76e7610b4b9d3167627ee9bcb2b4bf1c0" exitCode=0 Jan 22 01:05:35 crc kubenswrapper[4829]: I0122 01:05:35.961841 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g4zhv" event={"ID":"6425f247-5696-4f31-9603-5b3ea14bfb73","Type":"ContainerDied","Data":"3b9a06f56bcd651881e931349d491ea76e7610b4b9d3167627ee9bcb2b4bf1c0"} Jan 22 01:05:35 crc kubenswrapper[4829]: I0122 01:05:35.961937 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g4zhv" event={"ID":"6425f247-5696-4f31-9603-5b3ea14bfb73","Type":"ContainerDied","Data":"d1489e3b553060ce7736f605b84e3cb668a2bd64b657d6d4f5f7d9b79fd1955a"} Jan 22 01:05:35 crc kubenswrapper[4829]: I0122 01:05:35.962021 4829 scope.go:117] "RemoveContainer" containerID="3b9a06f56bcd651881e931349d491ea76e7610b4b9d3167627ee9bcb2b4bf1c0" Jan 22 01:05:35 crc kubenswrapper[4829]: I0122 01:05:35.962203 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g4zhv" Jan 22 01:05:35 crc kubenswrapper[4829]: I0122 01:05:35.966162 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6425f247-5696-4f31-9603-5b3ea14bfb73-kube-api-access-xjq9x" (OuterVolumeSpecName: "kube-api-access-xjq9x") pod "6425f247-5696-4f31-9603-5b3ea14bfb73" (UID: "6425f247-5696-4f31-9603-5b3ea14bfb73"). InnerVolumeSpecName "kube-api-access-xjq9x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 01:05:35 crc kubenswrapper[4829]: I0122 01:05:35.998346 4829 scope.go:117] "RemoveContainer" containerID="bedd396caa47321240539af9b7db21f0819d48bda34ec1a44fb8dee5b4339d5f" Jan 22 01:05:36 crc kubenswrapper[4829]: I0122 01:05:36.015201 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6425f247-5696-4f31-9603-5b3ea14bfb73-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6425f247-5696-4f31-9603-5b3ea14bfb73" (UID: "6425f247-5696-4f31-9603-5b3ea14bfb73"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 01:05:36 crc kubenswrapper[4829]: I0122 01:05:36.035583 4829 scope.go:117] "RemoveContainer" containerID="6006156c29963fab16c2343e5ec173139dc23429325dcf2cc99b4acbf9efda0e" Jan 22 01:05:36 crc kubenswrapper[4829]: I0122 01:05:36.054184 4829 scope.go:117] "RemoveContainer" containerID="3b9a06f56bcd651881e931349d491ea76e7610b4b9d3167627ee9bcb2b4bf1c0" Jan 22 01:05:36 crc kubenswrapper[4829]: E0122 01:05:36.054919 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b9a06f56bcd651881e931349d491ea76e7610b4b9d3167627ee9bcb2b4bf1c0\": container with ID starting with 3b9a06f56bcd651881e931349d491ea76e7610b4b9d3167627ee9bcb2b4bf1c0 not found: ID does not exist" containerID="3b9a06f56bcd651881e931349d491ea76e7610b4b9d3167627ee9bcb2b4bf1c0" Jan 22 01:05:36 crc kubenswrapper[4829]: I0122 01:05:36.055064 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b9a06f56bcd651881e931349d491ea76e7610b4b9d3167627ee9bcb2b4bf1c0"} err="failed to get container status \"3b9a06f56bcd651881e931349d491ea76e7610b4b9d3167627ee9bcb2b4bf1c0\": rpc error: code = NotFound desc = could not find container \"3b9a06f56bcd651881e931349d491ea76e7610b4b9d3167627ee9bcb2b4bf1c0\": container with ID starting with 3b9a06f56bcd651881e931349d491ea76e7610b4b9d3167627ee9bcb2b4bf1c0 not found: ID does not exist" Jan 22 01:05:36 crc kubenswrapper[4829]: I0122 01:05:36.055174 4829 scope.go:117] "RemoveContainer" containerID="bedd396caa47321240539af9b7db21f0819d48bda34ec1a44fb8dee5b4339d5f" Jan 22 01:05:36 crc kubenswrapper[4829]: E0122 01:05:36.055617 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bedd396caa47321240539af9b7db21f0819d48bda34ec1a44fb8dee5b4339d5f\": container with ID starting with bedd396caa47321240539af9b7db21f0819d48bda34ec1a44fb8dee5b4339d5f not found: ID does not exist" containerID="bedd396caa47321240539af9b7db21f0819d48bda34ec1a44fb8dee5b4339d5f" Jan 22 01:05:36 crc kubenswrapper[4829]: I0122 01:05:36.055723 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bedd396caa47321240539af9b7db21f0819d48bda34ec1a44fb8dee5b4339d5f"} err="failed to get container status \"bedd396caa47321240539af9b7db21f0819d48bda34ec1a44fb8dee5b4339d5f\": rpc error: code = NotFound desc = could not find container \"bedd396caa47321240539af9b7db21f0819d48bda34ec1a44fb8dee5b4339d5f\": container with ID starting with bedd396caa47321240539af9b7db21f0819d48bda34ec1a44fb8dee5b4339d5f not found: ID does not exist" Jan 22 01:05:36 crc kubenswrapper[4829]: I0122 01:05:36.055821 4829 scope.go:117] "RemoveContainer" containerID="6006156c29963fab16c2343e5ec173139dc23429325dcf2cc99b4acbf9efda0e" Jan 22 01:05:36 crc kubenswrapper[4829]: 
E0122 01:05:36.056287 4829 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6006156c29963fab16c2343e5ec173139dc23429325dcf2cc99b4acbf9efda0e\": container with ID starting with 6006156c29963fab16c2343e5ec173139dc23429325dcf2cc99b4acbf9efda0e not found: ID does not exist" containerID="6006156c29963fab16c2343e5ec173139dc23429325dcf2cc99b4acbf9efda0e" Jan 22 01:05:36 crc kubenswrapper[4829]: I0122 01:05:36.056392 4829 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6006156c29963fab16c2343e5ec173139dc23429325dcf2cc99b4acbf9efda0e"} err="failed to get container status \"6006156c29963fab16c2343e5ec173139dc23429325dcf2cc99b4acbf9efda0e\": rpc error: code = NotFound desc = could not find container \"6006156c29963fab16c2343e5ec173139dc23429325dcf2cc99b4acbf9efda0e\": container with ID starting with 6006156c29963fab16c2343e5ec173139dc23429325dcf2cc99b4acbf9efda0e not found: ID does not exist" Jan 22 01:05:36 crc kubenswrapper[4829]: I0122 01:05:36.058085 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6425f247-5696-4f31-9603-5b3ea14bfb73-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 01:05:36 crc kubenswrapper[4829]: I0122 01:05:36.058124 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xjq9x\" (UniqueName: \"kubernetes.io/projected/6425f247-5696-4f31-9603-5b3ea14bfb73-kube-api-access-xjq9x\") on node \"crc\" DevicePath \"\"" Jan 22 01:05:36 crc kubenswrapper[4829]: I0122 01:05:36.058134 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6425f247-5696-4f31-9603-5b3ea14bfb73-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 01:05:36 crc kubenswrapper[4829]: I0122 01:05:36.316746 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-g4zhv"] Jan 22 01:05:36 crc kubenswrapper[4829]: I0122 01:05:36.329337 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-g4zhv"] Jan 22 01:05:36 crc kubenswrapper[4829]: I0122 01:05:36.573223 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6425f247-5696-4f31-9603-5b3ea14bfb73" path="/var/lib/kubelet/pods/6425f247-5696-4f31-9603-5b3ea14bfb73/volumes" Jan 22 01:07:04 crc kubenswrapper[4829]: I0122 01:07:04.658851 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 01:07:04 crc kubenswrapper[4829]: I0122 01:07:04.659784 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 01:07:34 crc kubenswrapper[4829]: I0122 01:07:34.658536 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 01:07:34 crc 
kubenswrapper[4829]: I0122 01:07:34.659245 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 01:08:04 crc kubenswrapper[4829]: I0122 01:08:04.658669 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 01:08:04 crc kubenswrapper[4829]: I0122 01:08:04.659720 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 01:08:04 crc kubenswrapper[4829]: I0122 01:08:04.660042 4829 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 01:08:04 crc kubenswrapper[4829]: I0122 01:08:04.661161 4829 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6"} pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 01:08:04 crc kubenswrapper[4829]: I0122 01:08:04.661250 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" containerID="cri-o://013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" gracePeriod=600 Jan 22 01:08:04 crc kubenswrapper[4829]: E0122 01:08:04.800694 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:08:05 crc kubenswrapper[4829]: I0122 01:08:05.645708 4829 generic.go:334] "Generic (PLEG): container finished" podID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerID="013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" exitCode=0 Jan 22 01:08:05 crc kubenswrapper[4829]: I0122 01:08:05.645768 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerDied","Data":"013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6"} Jan 22 01:08:05 crc kubenswrapper[4829]: I0122 01:08:05.646570 4829 scope.go:117] "RemoveContainer" containerID="1a18f67d780b8159297dbb4fe83b23a2b1970de46ebbe229f30e582bf2037a16" Jan 22 01:08:05 crc kubenswrapper[4829]: I0122 01:08:05.647452 4829 scope.go:117] "RemoveContainer" 
containerID="013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" Jan 22 01:08:05 crc kubenswrapper[4829]: E0122 01:08:05.647869 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:08:20 crc kubenswrapper[4829]: I0122 01:08:20.554302 4829 scope.go:117] "RemoveContainer" containerID="013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" Jan 22 01:08:20 crc kubenswrapper[4829]: E0122 01:08:20.555145 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:08:32 crc kubenswrapper[4829]: I0122 01:08:32.557696 4829 scope.go:117] "RemoveContainer" containerID="013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" Jan 22 01:08:32 crc kubenswrapper[4829]: E0122 01:08:32.558880 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:08:46 crc kubenswrapper[4829]: I0122 01:08:46.553380 4829 scope.go:117] "RemoveContainer" containerID="013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" Jan 22 01:08:46 crc kubenswrapper[4829]: E0122 01:08:46.554475 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:08:59 crc kubenswrapper[4829]: I0122 01:08:59.554150 4829 scope.go:117] "RemoveContainer" containerID="013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" Jan 22 01:08:59 crc kubenswrapper[4829]: E0122 01:08:59.555438 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:09:11 crc kubenswrapper[4829]: I0122 01:09:11.554713 4829 scope.go:117] "RemoveContainer" containerID="013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" Jan 22 01:09:11 crc kubenswrapper[4829]: E0122 01:09:11.557726 4829 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:09:26 crc kubenswrapper[4829]: I0122 01:09:26.554387 4829 scope.go:117] "RemoveContainer" containerID="013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" Jan 22 01:09:26 crc kubenswrapper[4829]: E0122 01:09:26.555675 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:09:40 crc kubenswrapper[4829]: I0122 01:09:40.553171 4829 scope.go:117] "RemoveContainer" containerID="013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" Jan 22 01:09:40 crc kubenswrapper[4829]: E0122 01:09:40.554958 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:09:53 crc kubenswrapper[4829]: I0122 01:09:53.553092 4829 scope.go:117] "RemoveContainer" containerID="013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" Jan 22 01:09:53 crc kubenswrapper[4829]: E0122 01:09:53.553833 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:10:04 crc kubenswrapper[4829]: I0122 01:10:04.553593 4829 scope.go:117] "RemoveContainer" containerID="013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" Jan 22 01:10:04 crc kubenswrapper[4829]: E0122 01:10:04.554682 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:10:15 crc kubenswrapper[4829]: I0122 01:10:15.554506 4829 scope.go:117] "RemoveContainer" containerID="013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" Jan 22 01:10:15 crc kubenswrapper[4829]: E0122 01:10:15.555679 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:10:26 crc kubenswrapper[4829]: I0122 01:10:26.554834 4829 scope.go:117] "RemoveContainer" containerID="013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" Jan 22 01:10:26 crc kubenswrapper[4829]: E0122 01:10:26.555976 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:10:39 crc kubenswrapper[4829]: I0122 01:10:39.554764 4829 scope.go:117] "RemoveContainer" containerID="013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" Jan 22 01:10:39 crc kubenswrapper[4829]: E0122 01:10:39.555634 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:10:50 crc kubenswrapper[4829]: I0122 01:10:50.563640 4829 scope.go:117] "RemoveContainer" containerID="013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" Jan 22 01:10:50 crc kubenswrapper[4829]: E0122 01:10:50.564995 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:11:03 crc kubenswrapper[4829]: I0122 01:11:03.553893 4829 scope.go:117] "RemoveContainer" containerID="013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" Jan 22 01:11:03 crc kubenswrapper[4829]: E0122 01:11:03.554794 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:11:16 crc kubenswrapper[4829]: I0122 01:11:16.555038 4829 scope.go:117] "RemoveContainer" containerID="013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" Jan 22 01:11:16 crc kubenswrapper[4829]: E0122 01:11:16.555900 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" 
podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:11:27 crc kubenswrapper[4829]: I0122 01:11:27.553579 4829 scope.go:117] "RemoveContainer" containerID="013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" Jan 22 01:11:27 crc kubenswrapper[4829]: E0122 01:11:27.554536 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:11:38 crc kubenswrapper[4829]: I0122 01:11:38.558802 4829 scope.go:117] "RemoveContainer" containerID="013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" Jan 22 01:11:38 crc kubenswrapper[4829]: E0122 01:11:38.559833 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:11:52 crc kubenswrapper[4829]: I0122 01:11:52.575023 4829 scope.go:117] "RemoveContainer" containerID="013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" Jan 22 01:11:52 crc kubenswrapper[4829]: E0122 01:11:52.577423 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:12:07 crc kubenswrapper[4829]: I0122 01:12:07.553776 4829 scope.go:117] "RemoveContainer" containerID="013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" Jan 22 01:12:07 crc kubenswrapper[4829]: E0122 01:12:07.556631 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:12:20 crc kubenswrapper[4829]: I0122 01:12:20.554579 4829 scope.go:117] "RemoveContainer" containerID="013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" Jan 22 01:12:20 crc kubenswrapper[4829]: E0122 01:12:20.555341 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:12:34 crc kubenswrapper[4829]: I0122 01:12:34.589872 4829 scope.go:117] "RemoveContainer" 
containerID="013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" Jan 22 01:12:34 crc kubenswrapper[4829]: E0122 01:12:34.590524 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:12:47 crc kubenswrapper[4829]: I0122 01:12:47.553952 4829 scope.go:117] "RemoveContainer" containerID="013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" Jan 22 01:12:47 crc kubenswrapper[4829]: E0122 01:12:47.554481 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:13:00 crc kubenswrapper[4829]: I0122 01:13:00.557668 4829 scope.go:117] "RemoveContainer" containerID="013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" Jan 22 01:13:00 crc kubenswrapper[4829]: E0122 01:13:00.558408 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:13:11 crc kubenswrapper[4829]: I0122 01:13:11.553861 4829 scope.go:117] "RemoveContainer" containerID="013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" Jan 22 01:13:11 crc kubenswrapper[4829]: I0122 01:13:11.800336 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerStarted","Data":"610ddacf016a57bc43c153558b651b631d528400356b86a2451f9f8a8ec820fb"} Jan 22 01:14:32 crc kubenswrapper[4829]: I0122 01:14:32.060366 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-d8lc2"] Jan 22 01:14:32 crc kubenswrapper[4829]: E0122 01:14:32.063498 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6425f247-5696-4f31-9603-5b3ea14bfb73" containerName="extract-utilities" Jan 22 01:14:32 crc kubenswrapper[4829]: I0122 01:14:32.063739 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="6425f247-5696-4f31-9603-5b3ea14bfb73" containerName="extract-utilities" Jan 22 01:14:32 crc kubenswrapper[4829]: E0122 01:14:32.063941 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6425f247-5696-4f31-9603-5b3ea14bfb73" containerName="registry-server" Jan 22 01:14:32 crc kubenswrapper[4829]: I0122 01:14:32.064104 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="6425f247-5696-4f31-9603-5b3ea14bfb73" containerName="registry-server" Jan 22 01:14:32 crc kubenswrapper[4829]: E0122 01:14:32.064370 4829 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="6425f247-5696-4f31-9603-5b3ea14bfb73" containerName="extract-content" Jan 22 01:14:32 crc kubenswrapper[4829]: I0122 01:14:32.064507 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="6425f247-5696-4f31-9603-5b3ea14bfb73" containerName="extract-content" Jan 22 01:14:32 crc kubenswrapper[4829]: I0122 01:14:32.064863 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="6425f247-5696-4f31-9603-5b3ea14bfb73" containerName="registry-server" Jan 22 01:14:32 crc kubenswrapper[4829]: I0122 01:14:32.066661 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d8lc2" Jan 22 01:14:32 crc kubenswrapper[4829]: I0122 01:14:32.073492 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d8lc2"] Jan 22 01:14:32 crc kubenswrapper[4829]: I0122 01:14:32.165160 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpcz4\" (UniqueName: \"kubernetes.io/projected/f9b95095-9fae-44ab-9776-cca29ad9c917-kube-api-access-hpcz4\") pod \"community-operators-d8lc2\" (UID: \"f9b95095-9fae-44ab-9776-cca29ad9c917\") " pod="openshift-marketplace/community-operators-d8lc2" Jan 22 01:14:32 crc kubenswrapper[4829]: I0122 01:14:32.165222 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9b95095-9fae-44ab-9776-cca29ad9c917-catalog-content\") pod \"community-operators-d8lc2\" (UID: \"f9b95095-9fae-44ab-9776-cca29ad9c917\") " pod="openshift-marketplace/community-operators-d8lc2" Jan 22 01:14:32 crc kubenswrapper[4829]: I0122 01:14:32.165299 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9b95095-9fae-44ab-9776-cca29ad9c917-utilities\") pod \"community-operators-d8lc2\" (UID: \"f9b95095-9fae-44ab-9776-cca29ad9c917\") " pod="openshift-marketplace/community-operators-d8lc2" Jan 22 01:14:32 crc kubenswrapper[4829]: I0122 01:14:32.266375 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9b95095-9fae-44ab-9776-cca29ad9c917-utilities\") pod \"community-operators-d8lc2\" (UID: \"f9b95095-9fae-44ab-9776-cca29ad9c917\") " pod="openshift-marketplace/community-operators-d8lc2" Jan 22 01:14:32 crc kubenswrapper[4829]: I0122 01:14:32.266464 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpcz4\" (UniqueName: \"kubernetes.io/projected/f9b95095-9fae-44ab-9776-cca29ad9c917-kube-api-access-hpcz4\") pod \"community-operators-d8lc2\" (UID: \"f9b95095-9fae-44ab-9776-cca29ad9c917\") " pod="openshift-marketplace/community-operators-d8lc2" Jan 22 01:14:32 crc kubenswrapper[4829]: I0122 01:14:32.266493 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9b95095-9fae-44ab-9776-cca29ad9c917-catalog-content\") pod \"community-operators-d8lc2\" (UID: \"f9b95095-9fae-44ab-9776-cca29ad9c917\") " pod="openshift-marketplace/community-operators-d8lc2" Jan 22 01:14:32 crc kubenswrapper[4829]: I0122 01:14:32.267114 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9b95095-9fae-44ab-9776-cca29ad9c917-utilities\") pod \"community-operators-d8lc2\" 
(UID: \"f9b95095-9fae-44ab-9776-cca29ad9c917\") " pod="openshift-marketplace/community-operators-d8lc2" Jan 22 01:14:32 crc kubenswrapper[4829]: I0122 01:14:32.267137 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9b95095-9fae-44ab-9776-cca29ad9c917-catalog-content\") pod \"community-operators-d8lc2\" (UID: \"f9b95095-9fae-44ab-9776-cca29ad9c917\") " pod="openshift-marketplace/community-operators-d8lc2" Jan 22 01:14:32 crc kubenswrapper[4829]: I0122 01:14:32.286312 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpcz4\" (UniqueName: \"kubernetes.io/projected/f9b95095-9fae-44ab-9776-cca29ad9c917-kube-api-access-hpcz4\") pod \"community-operators-d8lc2\" (UID: \"f9b95095-9fae-44ab-9776-cca29ad9c917\") " pod="openshift-marketplace/community-operators-d8lc2" Jan 22 01:14:32 crc kubenswrapper[4829]: I0122 01:14:32.405753 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d8lc2" Jan 22 01:14:32 crc kubenswrapper[4829]: I0122 01:14:32.926350 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d8lc2"] Jan 22 01:14:32 crc kubenswrapper[4829]: W0122 01:14:32.936937 4829 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf9b95095_9fae_44ab_9776_cca29ad9c917.slice/crio-90b37f7b171364e3fdf19fa309dd87c12be97cc4476ce96d149a97f32bb687aa WatchSource:0}: Error finding container 90b37f7b171364e3fdf19fa309dd87c12be97cc4476ce96d149a97f32bb687aa: Status 404 returned error can't find the container with id 90b37f7b171364e3fdf19fa309dd87c12be97cc4476ce96d149a97f32bb687aa Jan 22 01:14:33 crc kubenswrapper[4829]: I0122 01:14:33.502923 4829 generic.go:334] "Generic (PLEG): container finished" podID="f9b95095-9fae-44ab-9776-cca29ad9c917" containerID="6ceabe50f9d2ddd951454f3427401961bbf3d34cce65f01787b7360c90bcce2a" exitCode=0 Jan 22 01:14:33 crc kubenswrapper[4829]: I0122 01:14:33.503106 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d8lc2" event={"ID":"f9b95095-9fae-44ab-9776-cca29ad9c917","Type":"ContainerDied","Data":"6ceabe50f9d2ddd951454f3427401961bbf3d34cce65f01787b7360c90bcce2a"} Jan 22 01:14:33 crc kubenswrapper[4829]: I0122 01:14:33.504440 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d8lc2" event={"ID":"f9b95095-9fae-44ab-9776-cca29ad9c917","Type":"ContainerStarted","Data":"90b37f7b171364e3fdf19fa309dd87c12be97cc4476ce96d149a97f32bb687aa"} Jan 22 01:14:33 crc kubenswrapper[4829]: I0122 01:14:33.504647 4829 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 01:14:34 crc kubenswrapper[4829]: I0122 01:14:34.513908 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d8lc2" event={"ID":"f9b95095-9fae-44ab-9776-cca29ad9c917","Type":"ContainerStarted","Data":"ed43d6ed4a63e4960ed806e5f3ba9147c7dbc54d8e054cb1dc09459286cab504"} Jan 22 01:14:35 crc kubenswrapper[4829]: I0122 01:14:35.527612 4829 generic.go:334] "Generic (PLEG): container finished" podID="f9b95095-9fae-44ab-9776-cca29ad9c917" containerID="ed43d6ed4a63e4960ed806e5f3ba9147c7dbc54d8e054cb1dc09459286cab504" exitCode=0 Jan 22 01:14:35 crc kubenswrapper[4829]: I0122 01:14:35.527684 4829 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-marketplace/community-operators-d8lc2" event={"ID":"f9b95095-9fae-44ab-9776-cca29ad9c917","Type":"ContainerDied","Data":"ed43d6ed4a63e4960ed806e5f3ba9147c7dbc54d8e054cb1dc09459286cab504"} Jan 22 01:14:36 crc kubenswrapper[4829]: I0122 01:14:36.542298 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d8lc2" event={"ID":"f9b95095-9fae-44ab-9776-cca29ad9c917","Type":"ContainerStarted","Data":"f9cd5204e0c3afe6a787ffa4d2d8b0534510edbc6df47a0800ee5bc824728020"} Jan 22 01:14:36 crc kubenswrapper[4829]: I0122 01:14:36.588058 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-d8lc2" podStartSLOduration=2.136801753 podStartE2EDuration="4.588030913s" podCreationTimestamp="2026-01-22 01:14:32 +0000 UTC" firstStartedPulling="2026-01-22 01:14:33.504343307 +0000 UTC m=+4051.540585219" lastFinishedPulling="2026-01-22 01:14:35.955572427 +0000 UTC m=+4053.991814379" observedRunningTime="2026-01-22 01:14:36.574691995 +0000 UTC m=+4054.610933987" watchObservedRunningTime="2026-01-22 01:14:36.588030913 +0000 UTC m=+4054.624272865" Jan 22 01:14:42 crc kubenswrapper[4829]: I0122 01:14:42.406495 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-d8lc2" Jan 22 01:14:42 crc kubenswrapper[4829]: I0122 01:14:42.407345 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-d8lc2" Jan 22 01:14:42 crc kubenswrapper[4829]: I0122 01:14:42.475477 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-d8lc2" Jan 22 01:14:42 crc kubenswrapper[4829]: I0122 01:14:42.677289 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-d8lc2" Jan 22 01:14:42 crc kubenswrapper[4829]: I0122 01:14:42.739774 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-d8lc2"] Jan 22 01:14:44 crc kubenswrapper[4829]: I0122 01:14:44.616770 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-d8lc2" podUID="f9b95095-9fae-44ab-9776-cca29ad9c917" containerName="registry-server" containerID="cri-o://f9cd5204e0c3afe6a787ffa4d2d8b0534510edbc6df47a0800ee5bc824728020" gracePeriod=2 Jan 22 01:14:45 crc kubenswrapper[4829]: I0122 01:14:45.624824 4829 generic.go:334] "Generic (PLEG): container finished" podID="f9b95095-9fae-44ab-9776-cca29ad9c917" containerID="f9cd5204e0c3afe6a787ffa4d2d8b0534510edbc6df47a0800ee5bc824728020" exitCode=0 Jan 22 01:14:45 crc kubenswrapper[4829]: I0122 01:14:45.624989 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d8lc2" event={"ID":"f9b95095-9fae-44ab-9776-cca29ad9c917","Type":"ContainerDied","Data":"f9cd5204e0c3afe6a787ffa4d2d8b0534510edbc6df47a0800ee5bc824728020"} Jan 22 01:14:45 crc kubenswrapper[4829]: I0122 01:14:45.751489 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-d8lc2" Jan 22 01:14:45 crc kubenswrapper[4829]: I0122 01:14:45.902726 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9b95095-9fae-44ab-9776-cca29ad9c917-catalog-content\") pod \"f9b95095-9fae-44ab-9776-cca29ad9c917\" (UID: \"f9b95095-9fae-44ab-9776-cca29ad9c917\") " Jan 22 01:14:45 crc kubenswrapper[4829]: I0122 01:14:45.902806 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9b95095-9fae-44ab-9776-cca29ad9c917-utilities\") pod \"f9b95095-9fae-44ab-9776-cca29ad9c917\" (UID: \"f9b95095-9fae-44ab-9776-cca29ad9c917\") " Jan 22 01:14:45 crc kubenswrapper[4829]: I0122 01:14:45.902894 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hpcz4\" (UniqueName: \"kubernetes.io/projected/f9b95095-9fae-44ab-9776-cca29ad9c917-kube-api-access-hpcz4\") pod \"f9b95095-9fae-44ab-9776-cca29ad9c917\" (UID: \"f9b95095-9fae-44ab-9776-cca29ad9c917\") " Jan 22 01:14:45 crc kubenswrapper[4829]: I0122 01:14:45.903869 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9b95095-9fae-44ab-9776-cca29ad9c917-utilities" (OuterVolumeSpecName: "utilities") pod "f9b95095-9fae-44ab-9776-cca29ad9c917" (UID: "f9b95095-9fae-44ab-9776-cca29ad9c917"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 01:14:45 crc kubenswrapper[4829]: I0122 01:14:45.926231 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9b95095-9fae-44ab-9776-cca29ad9c917-kube-api-access-hpcz4" (OuterVolumeSpecName: "kube-api-access-hpcz4") pod "f9b95095-9fae-44ab-9776-cca29ad9c917" (UID: "f9b95095-9fae-44ab-9776-cca29ad9c917"). InnerVolumeSpecName "kube-api-access-hpcz4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 01:14:45 crc kubenswrapper[4829]: I0122 01:14:45.958108 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9b95095-9fae-44ab-9776-cca29ad9c917-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f9b95095-9fae-44ab-9776-cca29ad9c917" (UID: "f9b95095-9fae-44ab-9776-cca29ad9c917"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 01:14:46 crc kubenswrapper[4829]: I0122 01:14:46.004097 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hpcz4\" (UniqueName: \"kubernetes.io/projected/f9b95095-9fae-44ab-9776-cca29ad9c917-kube-api-access-hpcz4\") on node \"crc\" DevicePath \"\"" Jan 22 01:14:46 crc kubenswrapper[4829]: I0122 01:14:46.004124 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9b95095-9fae-44ab-9776-cca29ad9c917-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 01:14:46 crc kubenswrapper[4829]: I0122 01:14:46.004134 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9b95095-9fae-44ab-9776-cca29ad9c917-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 01:14:46 crc kubenswrapper[4829]: I0122 01:14:46.637169 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d8lc2" event={"ID":"f9b95095-9fae-44ab-9776-cca29ad9c917","Type":"ContainerDied","Data":"90b37f7b171364e3fdf19fa309dd87c12be97cc4476ce96d149a97f32bb687aa"} Jan 22 01:14:46 crc kubenswrapper[4829]: I0122 01:14:46.637655 4829 scope.go:117] "RemoveContainer" containerID="f9cd5204e0c3afe6a787ffa4d2d8b0534510edbc6df47a0800ee5bc824728020" Jan 22 01:14:46 crc kubenswrapper[4829]: I0122 01:14:46.637302 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d8lc2" Jan 22 01:14:46 crc kubenswrapper[4829]: I0122 01:14:46.676749 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-d8lc2"] Jan 22 01:14:46 crc kubenswrapper[4829]: I0122 01:14:46.682925 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-d8lc2"] Jan 22 01:14:46 crc kubenswrapper[4829]: I0122 01:14:46.683988 4829 scope.go:117] "RemoveContainer" containerID="ed43d6ed4a63e4960ed806e5f3ba9147c7dbc54d8e054cb1dc09459286cab504" Jan 22 01:14:46 crc kubenswrapper[4829]: I0122 01:14:46.720213 4829 scope.go:117] "RemoveContainer" containerID="6ceabe50f9d2ddd951454f3427401961bbf3d34cce65f01787b7360c90bcce2a" Jan 22 01:14:48 crc kubenswrapper[4829]: I0122 01:14:48.568404 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9b95095-9fae-44ab-9776-cca29ad9c917" path="/var/lib/kubelet/pods/f9b95095-9fae-44ab-9776-cca29ad9c917/volumes" Jan 22 01:15:00 crc kubenswrapper[4829]: I0122 01:15:00.194666 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484075-t6ll4"] Jan 22 01:15:00 crc kubenswrapper[4829]: E0122 01:15:00.195436 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9b95095-9fae-44ab-9776-cca29ad9c917" containerName="extract-utilities" Jan 22 01:15:00 crc kubenswrapper[4829]: I0122 01:15:00.195454 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9b95095-9fae-44ab-9776-cca29ad9c917" containerName="extract-utilities" Jan 22 01:15:00 crc kubenswrapper[4829]: E0122 01:15:00.195489 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9b95095-9fae-44ab-9776-cca29ad9c917" containerName="registry-server" Jan 22 01:15:00 crc kubenswrapper[4829]: I0122 01:15:00.195498 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9b95095-9fae-44ab-9776-cca29ad9c917" containerName="registry-server" Jan 22 01:15:00 crc kubenswrapper[4829]: E0122 
01:15:00.195513 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9b95095-9fae-44ab-9776-cca29ad9c917" containerName="extract-content" Jan 22 01:15:00 crc kubenswrapper[4829]: I0122 01:15:00.195521 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9b95095-9fae-44ab-9776-cca29ad9c917" containerName="extract-content" Jan 22 01:15:00 crc kubenswrapper[4829]: I0122 01:15:00.195705 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9b95095-9fae-44ab-9776-cca29ad9c917" containerName="registry-server" Jan 22 01:15:00 crc kubenswrapper[4829]: I0122 01:15:00.196364 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484075-t6ll4" Jan 22 01:15:00 crc kubenswrapper[4829]: I0122 01:15:00.200478 4829 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 01:15:00 crc kubenswrapper[4829]: I0122 01:15:00.201386 4829 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 01:15:00 crc kubenswrapper[4829]: I0122 01:15:00.213737 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484075-t6ll4"] Jan 22 01:15:00 crc kubenswrapper[4829]: I0122 01:15:00.367929 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/612471fb-f4ea-4100-8579-8e3980f5ee49-secret-volume\") pod \"collect-profiles-29484075-t6ll4\" (UID: \"612471fb-f4ea-4100-8579-8e3980f5ee49\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484075-t6ll4" Jan 22 01:15:00 crc kubenswrapper[4829]: I0122 01:15:00.368283 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c672h\" (UniqueName: \"kubernetes.io/projected/612471fb-f4ea-4100-8579-8e3980f5ee49-kube-api-access-c672h\") pod \"collect-profiles-29484075-t6ll4\" (UID: \"612471fb-f4ea-4100-8579-8e3980f5ee49\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484075-t6ll4" Jan 22 01:15:00 crc kubenswrapper[4829]: I0122 01:15:00.368336 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/612471fb-f4ea-4100-8579-8e3980f5ee49-config-volume\") pod \"collect-profiles-29484075-t6ll4\" (UID: \"612471fb-f4ea-4100-8579-8e3980f5ee49\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484075-t6ll4" Jan 22 01:15:00 crc kubenswrapper[4829]: I0122 01:15:00.470623 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/612471fb-f4ea-4100-8579-8e3980f5ee49-secret-volume\") pod \"collect-profiles-29484075-t6ll4\" (UID: \"612471fb-f4ea-4100-8579-8e3980f5ee49\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484075-t6ll4" Jan 22 01:15:00 crc kubenswrapper[4829]: I0122 01:15:00.470683 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c672h\" (UniqueName: \"kubernetes.io/projected/612471fb-f4ea-4100-8579-8e3980f5ee49-kube-api-access-c672h\") pod \"collect-profiles-29484075-t6ll4\" (UID: \"612471fb-f4ea-4100-8579-8e3980f5ee49\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484075-t6ll4" Jan 22 
01:15:00 crc kubenswrapper[4829]: I0122 01:15:00.470717 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/612471fb-f4ea-4100-8579-8e3980f5ee49-config-volume\") pod \"collect-profiles-29484075-t6ll4\" (UID: \"612471fb-f4ea-4100-8579-8e3980f5ee49\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484075-t6ll4" Jan 22 01:15:00 crc kubenswrapper[4829]: I0122 01:15:00.471886 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/612471fb-f4ea-4100-8579-8e3980f5ee49-config-volume\") pod \"collect-profiles-29484075-t6ll4\" (UID: \"612471fb-f4ea-4100-8579-8e3980f5ee49\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484075-t6ll4" Jan 22 01:15:00 crc kubenswrapper[4829]: I0122 01:15:00.476646 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/612471fb-f4ea-4100-8579-8e3980f5ee49-secret-volume\") pod \"collect-profiles-29484075-t6ll4\" (UID: \"612471fb-f4ea-4100-8579-8e3980f5ee49\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484075-t6ll4" Jan 22 01:15:00 crc kubenswrapper[4829]: I0122 01:15:00.502519 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c672h\" (UniqueName: \"kubernetes.io/projected/612471fb-f4ea-4100-8579-8e3980f5ee49-kube-api-access-c672h\") pod \"collect-profiles-29484075-t6ll4\" (UID: \"612471fb-f4ea-4100-8579-8e3980f5ee49\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484075-t6ll4" Jan 22 01:15:00 crc kubenswrapper[4829]: I0122 01:15:00.522659 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484075-t6ll4" Jan 22 01:15:00 crc kubenswrapper[4829]: I0122 01:15:00.943028 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484075-t6ll4"] Jan 22 01:15:01 crc kubenswrapper[4829]: I0122 01:15:01.775616 4829 generic.go:334] "Generic (PLEG): container finished" podID="612471fb-f4ea-4100-8579-8e3980f5ee49" containerID="e6695bc1aa3b43d87944dbedb87678d852405d650832c4976413afc0a19f71ab" exitCode=0 Jan 22 01:15:01 crc kubenswrapper[4829]: I0122 01:15:01.775664 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484075-t6ll4" event={"ID":"612471fb-f4ea-4100-8579-8e3980f5ee49","Type":"ContainerDied","Data":"e6695bc1aa3b43d87944dbedb87678d852405d650832c4976413afc0a19f71ab"} Jan 22 01:15:01 crc kubenswrapper[4829]: I0122 01:15:01.775694 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484075-t6ll4" event={"ID":"612471fb-f4ea-4100-8579-8e3980f5ee49","Type":"ContainerStarted","Data":"69403999e56b0ebb15784775dcab5e3bee6fa41c9db7e53666571324a4faafe9"} Jan 22 01:15:03 crc kubenswrapper[4829]: I0122 01:15:03.080340 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484075-t6ll4" Jan 22 01:15:03 crc kubenswrapper[4829]: I0122 01:15:03.208482 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/612471fb-f4ea-4100-8579-8e3980f5ee49-secret-volume\") pod \"612471fb-f4ea-4100-8579-8e3980f5ee49\" (UID: \"612471fb-f4ea-4100-8579-8e3980f5ee49\") " Jan 22 01:15:03 crc kubenswrapper[4829]: I0122 01:15:03.208620 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/612471fb-f4ea-4100-8579-8e3980f5ee49-config-volume\") pod \"612471fb-f4ea-4100-8579-8e3980f5ee49\" (UID: \"612471fb-f4ea-4100-8579-8e3980f5ee49\") " Jan 22 01:15:03 crc kubenswrapper[4829]: I0122 01:15:03.208689 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c672h\" (UniqueName: \"kubernetes.io/projected/612471fb-f4ea-4100-8579-8e3980f5ee49-kube-api-access-c672h\") pod \"612471fb-f4ea-4100-8579-8e3980f5ee49\" (UID: \"612471fb-f4ea-4100-8579-8e3980f5ee49\") " Jan 22 01:15:03 crc kubenswrapper[4829]: I0122 01:15:03.209301 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/612471fb-f4ea-4100-8579-8e3980f5ee49-config-volume" (OuterVolumeSpecName: "config-volume") pod "612471fb-f4ea-4100-8579-8e3980f5ee49" (UID: "612471fb-f4ea-4100-8579-8e3980f5ee49"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 01:15:03 crc kubenswrapper[4829]: I0122 01:15:03.213115 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/612471fb-f4ea-4100-8579-8e3980f5ee49-kube-api-access-c672h" (OuterVolumeSpecName: "kube-api-access-c672h") pod "612471fb-f4ea-4100-8579-8e3980f5ee49" (UID: "612471fb-f4ea-4100-8579-8e3980f5ee49"). InnerVolumeSpecName "kube-api-access-c672h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 01:15:03 crc kubenswrapper[4829]: I0122 01:15:03.213775 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/612471fb-f4ea-4100-8579-8e3980f5ee49-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "612471fb-f4ea-4100-8579-8e3980f5ee49" (UID: "612471fb-f4ea-4100-8579-8e3980f5ee49"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 01:15:03 crc kubenswrapper[4829]: I0122 01:15:03.310152 4829 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/612471fb-f4ea-4100-8579-8e3980f5ee49-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 01:15:03 crc kubenswrapper[4829]: I0122 01:15:03.310186 4829 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/612471fb-f4ea-4100-8579-8e3980f5ee49-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 01:15:03 crc kubenswrapper[4829]: I0122 01:15:03.310198 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c672h\" (UniqueName: \"kubernetes.io/projected/612471fb-f4ea-4100-8579-8e3980f5ee49-kube-api-access-c672h\") on node \"crc\" DevicePath \"\"" Jan 22 01:15:03 crc kubenswrapper[4829]: I0122 01:15:03.794523 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484075-t6ll4" event={"ID":"612471fb-f4ea-4100-8579-8e3980f5ee49","Type":"ContainerDied","Data":"69403999e56b0ebb15784775dcab5e3bee6fa41c9db7e53666571324a4faafe9"} Jan 22 01:15:03 crc kubenswrapper[4829]: I0122 01:15:03.795194 4829 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="69403999e56b0ebb15784775dcab5e3bee6fa41c9db7e53666571324a4faafe9" Jan 22 01:15:03 crc kubenswrapper[4829]: I0122 01:15:03.794686 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484075-t6ll4" Jan 22 01:15:04 crc kubenswrapper[4829]: I0122 01:15:04.149806 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484030-t75ln"] Jan 22 01:15:04 crc kubenswrapper[4829]: I0122 01:15:04.157809 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484030-t75ln"] Jan 22 01:15:04 crc kubenswrapper[4829]: I0122 01:15:04.599050 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7410ae33-4e2e-4147-9890-36c889741f39" path="/var/lib/kubelet/pods/7410ae33-4e2e-4147-9890-36c889741f39/volumes" Jan 22 01:15:26 crc kubenswrapper[4829]: I0122 01:15:26.629741 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6lsqg"] Jan 22 01:15:26 crc kubenswrapper[4829]: E0122 01:15:26.632913 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="612471fb-f4ea-4100-8579-8e3980f5ee49" containerName="collect-profiles" Jan 22 01:15:26 crc kubenswrapper[4829]: I0122 01:15:26.632943 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="612471fb-f4ea-4100-8579-8e3980f5ee49" containerName="collect-profiles" Jan 22 01:15:26 crc kubenswrapper[4829]: I0122 01:15:26.633156 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="612471fb-f4ea-4100-8579-8e3980f5ee49" containerName="collect-profiles" Jan 22 01:15:26 crc kubenswrapper[4829]: I0122 01:15:26.636561 4829 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6lsqg" Jan 22 01:15:26 crc kubenswrapper[4829]: I0122 01:15:26.644351 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6lsqg"] Jan 22 01:15:26 crc kubenswrapper[4829]: I0122 01:15:26.736516 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsxrt\" (UniqueName: \"kubernetes.io/projected/24283674-4d43-4105-b98b-12948b4b14e8-kube-api-access-gsxrt\") pod \"redhat-operators-6lsqg\" (UID: \"24283674-4d43-4105-b98b-12948b4b14e8\") " pod="openshift-marketplace/redhat-operators-6lsqg" Jan 22 01:15:26 crc kubenswrapper[4829]: I0122 01:15:26.736858 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24283674-4d43-4105-b98b-12948b4b14e8-utilities\") pod \"redhat-operators-6lsqg\" (UID: \"24283674-4d43-4105-b98b-12948b4b14e8\") " pod="openshift-marketplace/redhat-operators-6lsqg" Jan 22 01:15:26 crc kubenswrapper[4829]: I0122 01:15:26.736965 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24283674-4d43-4105-b98b-12948b4b14e8-catalog-content\") pod \"redhat-operators-6lsqg\" (UID: \"24283674-4d43-4105-b98b-12948b4b14e8\") " pod="openshift-marketplace/redhat-operators-6lsqg" Jan 22 01:15:26 crc kubenswrapper[4829]: I0122 01:15:26.838679 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsxrt\" (UniqueName: \"kubernetes.io/projected/24283674-4d43-4105-b98b-12948b4b14e8-kube-api-access-gsxrt\") pod \"redhat-operators-6lsqg\" (UID: \"24283674-4d43-4105-b98b-12948b4b14e8\") " pod="openshift-marketplace/redhat-operators-6lsqg" Jan 22 01:15:26 crc kubenswrapper[4829]: I0122 01:15:26.838778 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24283674-4d43-4105-b98b-12948b4b14e8-utilities\") pod \"redhat-operators-6lsqg\" (UID: \"24283674-4d43-4105-b98b-12948b4b14e8\") " pod="openshift-marketplace/redhat-operators-6lsqg" Jan 22 01:15:26 crc kubenswrapper[4829]: I0122 01:15:26.838812 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24283674-4d43-4105-b98b-12948b4b14e8-catalog-content\") pod \"redhat-operators-6lsqg\" (UID: \"24283674-4d43-4105-b98b-12948b4b14e8\") " pod="openshift-marketplace/redhat-operators-6lsqg" Jan 22 01:15:26 crc kubenswrapper[4829]: I0122 01:15:26.839442 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24283674-4d43-4105-b98b-12948b4b14e8-catalog-content\") pod \"redhat-operators-6lsqg\" (UID: \"24283674-4d43-4105-b98b-12948b4b14e8\") " pod="openshift-marketplace/redhat-operators-6lsqg" Jan 22 01:15:26 crc kubenswrapper[4829]: I0122 01:15:26.839500 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24283674-4d43-4105-b98b-12948b4b14e8-utilities\") pod \"redhat-operators-6lsqg\" (UID: \"24283674-4d43-4105-b98b-12948b4b14e8\") " pod="openshift-marketplace/redhat-operators-6lsqg" Jan 22 01:15:26 crc kubenswrapper[4829]: I0122 01:15:26.869224 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-gsxrt\" (UniqueName: \"kubernetes.io/projected/24283674-4d43-4105-b98b-12948b4b14e8-kube-api-access-gsxrt\") pod \"redhat-operators-6lsqg\" (UID: \"24283674-4d43-4105-b98b-12948b4b14e8\") " pod="openshift-marketplace/redhat-operators-6lsqg" Jan 22 01:15:26 crc kubenswrapper[4829]: I0122 01:15:26.968015 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6lsqg" Jan 22 01:15:27 crc kubenswrapper[4829]: I0122 01:15:27.189045 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6lsqg"] Jan 22 01:15:28 crc kubenswrapper[4829]: I0122 01:15:28.091567 4829 generic.go:334] "Generic (PLEG): container finished" podID="24283674-4d43-4105-b98b-12948b4b14e8" containerID="a92d72ecfda70cef8b9e62a4fea88437edbe49c96dfa17643ccfe409a9fbfffd" exitCode=0 Jan 22 01:15:28 crc kubenswrapper[4829]: I0122 01:15:28.091629 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6lsqg" event={"ID":"24283674-4d43-4105-b98b-12948b4b14e8","Type":"ContainerDied","Data":"a92d72ecfda70cef8b9e62a4fea88437edbe49c96dfa17643ccfe409a9fbfffd"} Jan 22 01:15:28 crc kubenswrapper[4829]: I0122 01:15:28.091686 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6lsqg" event={"ID":"24283674-4d43-4105-b98b-12948b4b14e8","Type":"ContainerStarted","Data":"997e33d22489f82bc8ec286ad422fce5ac35627be5b2b491472d980027fd0ec0"} Jan 22 01:15:30 crc kubenswrapper[4829]: I0122 01:15:30.114641 4829 generic.go:334] "Generic (PLEG): container finished" podID="24283674-4d43-4105-b98b-12948b4b14e8" containerID="3103b522d9b75e788102a1b11b44ab3ad8ace82bfdae75cca86296f812718eb0" exitCode=0 Jan 22 01:15:30 crc kubenswrapper[4829]: I0122 01:15:30.114690 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6lsqg" event={"ID":"24283674-4d43-4105-b98b-12948b4b14e8","Type":"ContainerDied","Data":"3103b522d9b75e788102a1b11b44ab3ad8ace82bfdae75cca86296f812718eb0"} Jan 22 01:15:31 crc kubenswrapper[4829]: I0122 01:15:31.127699 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6lsqg" event={"ID":"24283674-4d43-4105-b98b-12948b4b14e8","Type":"ContainerStarted","Data":"61cd2e5b8d6d56e14c059f99bcc7dbeb4ddd94337d8967b2e669846a4e1cd2d5"} Jan 22 01:15:31 crc kubenswrapper[4829]: I0122 01:15:31.158154 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6lsqg" podStartSLOduration=2.447310934 podStartE2EDuration="5.15812974s" podCreationTimestamp="2026-01-22 01:15:26 +0000 UTC" firstStartedPulling="2026-01-22 01:15:28.093820076 +0000 UTC m=+4106.130061998" lastFinishedPulling="2026-01-22 01:15:30.804638852 +0000 UTC m=+4108.840880804" observedRunningTime="2026-01-22 01:15:31.151632851 +0000 UTC m=+4109.187874793" watchObservedRunningTime="2026-01-22 01:15:31.15812974 +0000 UTC m=+4109.194371662" Jan 22 01:15:34 crc kubenswrapper[4829]: I0122 01:15:34.658912 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 01:15:34 crc kubenswrapper[4829]: I0122 01:15:34.659246 4829 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 01:15:36 crc kubenswrapper[4829]: I0122 01:15:36.969004 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6lsqg" Jan 22 01:15:36 crc kubenswrapper[4829]: I0122 01:15:36.969974 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6lsqg" Jan 22 01:15:45 crc kubenswrapper[4829]: I0122 01:15:45.778695 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6lsqg" Jan 22 01:15:45 crc kubenswrapper[4829]: I0122 01:15:45.923934 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6lsqg" Jan 22 01:15:46 crc kubenswrapper[4829]: I0122 01:15:46.020161 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6lsqg"] Jan 22 01:15:47 crc kubenswrapper[4829]: I0122 01:15:47.279040 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6lsqg" podUID="24283674-4d43-4105-b98b-12948b4b14e8" containerName="registry-server" containerID="cri-o://61cd2e5b8d6d56e14c059f99bcc7dbeb4ddd94337d8967b2e669846a4e1cd2d5" gracePeriod=2 Jan 22 01:15:48 crc kubenswrapper[4829]: I0122 01:15:48.291419 4829 generic.go:334] "Generic (PLEG): container finished" podID="24283674-4d43-4105-b98b-12948b4b14e8" containerID="61cd2e5b8d6d56e14c059f99bcc7dbeb4ddd94337d8967b2e669846a4e1cd2d5" exitCode=0 Jan 22 01:15:48 crc kubenswrapper[4829]: I0122 01:15:48.291514 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6lsqg" event={"ID":"24283674-4d43-4105-b98b-12948b4b14e8","Type":"ContainerDied","Data":"61cd2e5b8d6d56e14c059f99bcc7dbeb4ddd94337d8967b2e669846a4e1cd2d5"} Jan 22 01:15:49 crc kubenswrapper[4829]: I0122 01:15:49.169928 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6lsqg" Jan 22 01:15:49 crc kubenswrapper[4829]: I0122 01:15:49.225826 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsxrt\" (UniqueName: \"kubernetes.io/projected/24283674-4d43-4105-b98b-12948b4b14e8-kube-api-access-gsxrt\") pod \"24283674-4d43-4105-b98b-12948b4b14e8\" (UID: \"24283674-4d43-4105-b98b-12948b4b14e8\") " Jan 22 01:15:49 crc kubenswrapper[4829]: I0122 01:15:49.225893 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24283674-4d43-4105-b98b-12948b4b14e8-utilities\") pod \"24283674-4d43-4105-b98b-12948b4b14e8\" (UID: \"24283674-4d43-4105-b98b-12948b4b14e8\") " Jan 22 01:15:49 crc kubenswrapper[4829]: I0122 01:15:49.225962 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24283674-4d43-4105-b98b-12948b4b14e8-catalog-content\") pod \"24283674-4d43-4105-b98b-12948b4b14e8\" (UID: \"24283674-4d43-4105-b98b-12948b4b14e8\") " Jan 22 01:15:49 crc kubenswrapper[4829]: I0122 01:15:49.227399 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24283674-4d43-4105-b98b-12948b4b14e8-utilities" (OuterVolumeSpecName: "utilities") pod "24283674-4d43-4105-b98b-12948b4b14e8" (UID: "24283674-4d43-4105-b98b-12948b4b14e8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 01:15:49 crc kubenswrapper[4829]: I0122 01:15:49.233990 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24283674-4d43-4105-b98b-12948b4b14e8-kube-api-access-gsxrt" (OuterVolumeSpecName: "kube-api-access-gsxrt") pod "24283674-4d43-4105-b98b-12948b4b14e8" (UID: "24283674-4d43-4105-b98b-12948b4b14e8"). InnerVolumeSpecName "kube-api-access-gsxrt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 01:15:49 crc kubenswrapper[4829]: I0122 01:15:49.301846 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6lsqg" event={"ID":"24283674-4d43-4105-b98b-12948b4b14e8","Type":"ContainerDied","Data":"997e33d22489f82bc8ec286ad422fce5ac35627be5b2b491472d980027fd0ec0"} Jan 22 01:15:49 crc kubenswrapper[4829]: I0122 01:15:49.301891 4829 scope.go:117] "RemoveContainer" containerID="61cd2e5b8d6d56e14c059f99bcc7dbeb4ddd94337d8967b2e669846a4e1cd2d5" Jan 22 01:15:49 crc kubenswrapper[4829]: I0122 01:15:49.301995 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6lsqg" Jan 22 01:15:49 crc kubenswrapper[4829]: I0122 01:15:49.327451 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/24283674-4d43-4105-b98b-12948b4b14e8-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 01:15:49 crc kubenswrapper[4829]: I0122 01:15:49.327780 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsxrt\" (UniqueName: \"kubernetes.io/projected/24283674-4d43-4105-b98b-12948b4b14e8-kube-api-access-gsxrt\") on node \"crc\" DevicePath \"\"" Jan 22 01:15:49 crc kubenswrapper[4829]: I0122 01:15:49.330371 4829 scope.go:117] "RemoveContainer" containerID="3103b522d9b75e788102a1b11b44ab3ad8ace82bfdae75cca86296f812718eb0" Jan 22 01:15:49 crc kubenswrapper[4829]: I0122 01:15:49.350467 4829 scope.go:117] "RemoveContainer" containerID="a92d72ecfda70cef8b9e62a4fea88437edbe49c96dfa17643ccfe409a9fbfffd" Jan 22 01:15:49 crc kubenswrapper[4829]: I0122 01:15:49.371265 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24283674-4d43-4105-b98b-12948b4b14e8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "24283674-4d43-4105-b98b-12948b4b14e8" (UID: "24283674-4d43-4105-b98b-12948b4b14e8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 01:15:49 crc kubenswrapper[4829]: I0122 01:15:49.428796 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/24283674-4d43-4105-b98b-12948b4b14e8-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 01:15:49 crc kubenswrapper[4829]: I0122 01:15:49.666679 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6lsqg"] Jan 22 01:15:49 crc kubenswrapper[4829]: I0122 01:15:49.673146 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6lsqg"] Jan 22 01:15:50 crc kubenswrapper[4829]: I0122 01:15:50.570516 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24283674-4d43-4105-b98b-12948b4b14e8" path="/var/lib/kubelet/pods/24283674-4d43-4105-b98b-12948b4b14e8/volumes" Jan 22 01:16:04 crc kubenswrapper[4829]: I0122 01:16:04.362197 4829 scope.go:117] "RemoveContainer" containerID="c64e5fd408d1f7581b988b4f2ad7cb4ae772354f7c5b374368c1b647f7213501" Jan 22 01:16:04 crc kubenswrapper[4829]: I0122 01:16:04.658193 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 01:16:04 crc kubenswrapper[4829]: I0122 01:16:04.658252 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 01:16:06 crc kubenswrapper[4829]: I0122 01:16:06.646214 4829 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-cpwzq"] Jan 22 01:16:06 crc kubenswrapper[4829]: E0122 01:16:06.646599 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24283674-4d43-4105-b98b-12948b4b14e8" 
containerName="extract-utilities" Jan 22 01:16:06 crc kubenswrapper[4829]: I0122 01:16:06.646615 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="24283674-4d43-4105-b98b-12948b4b14e8" containerName="extract-utilities" Jan 22 01:16:06 crc kubenswrapper[4829]: E0122 01:16:06.646632 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24283674-4d43-4105-b98b-12948b4b14e8" containerName="extract-content" Jan 22 01:16:06 crc kubenswrapper[4829]: I0122 01:16:06.646640 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="24283674-4d43-4105-b98b-12948b4b14e8" containerName="extract-content" Jan 22 01:16:06 crc kubenswrapper[4829]: E0122 01:16:06.646647 4829 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24283674-4d43-4105-b98b-12948b4b14e8" containerName="registry-server" Jan 22 01:16:06 crc kubenswrapper[4829]: I0122 01:16:06.646653 4829 state_mem.go:107] "Deleted CPUSet assignment" podUID="24283674-4d43-4105-b98b-12948b4b14e8" containerName="registry-server" Jan 22 01:16:06 crc kubenswrapper[4829]: I0122 01:16:06.646787 4829 memory_manager.go:354] "RemoveStaleState removing state" podUID="24283674-4d43-4105-b98b-12948b4b14e8" containerName="registry-server" Jan 22 01:16:06 crc kubenswrapper[4829]: I0122 01:16:06.647870 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cpwzq" Jan 22 01:16:06 crc kubenswrapper[4829]: I0122 01:16:06.665346 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cpwzq"] Jan 22 01:16:06 crc kubenswrapper[4829]: I0122 01:16:06.844229 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s928n\" (UniqueName: \"kubernetes.io/projected/086c27b2-d7dd-4b1f-9db0-77f78daf06f1-kube-api-access-s928n\") pod \"certified-operators-cpwzq\" (UID: \"086c27b2-d7dd-4b1f-9db0-77f78daf06f1\") " pod="openshift-marketplace/certified-operators-cpwzq" Jan 22 01:16:06 crc kubenswrapper[4829]: I0122 01:16:06.844670 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/086c27b2-d7dd-4b1f-9db0-77f78daf06f1-catalog-content\") pod \"certified-operators-cpwzq\" (UID: \"086c27b2-d7dd-4b1f-9db0-77f78daf06f1\") " pod="openshift-marketplace/certified-operators-cpwzq" Jan 22 01:16:06 crc kubenswrapper[4829]: I0122 01:16:06.844961 4829 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/086c27b2-d7dd-4b1f-9db0-77f78daf06f1-utilities\") pod \"certified-operators-cpwzq\" (UID: \"086c27b2-d7dd-4b1f-9db0-77f78daf06f1\") " pod="openshift-marketplace/certified-operators-cpwzq" Jan 22 01:16:06 crc kubenswrapper[4829]: I0122 01:16:06.947554 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s928n\" (UniqueName: \"kubernetes.io/projected/086c27b2-d7dd-4b1f-9db0-77f78daf06f1-kube-api-access-s928n\") pod \"certified-operators-cpwzq\" (UID: \"086c27b2-d7dd-4b1f-9db0-77f78daf06f1\") " pod="openshift-marketplace/certified-operators-cpwzq" Jan 22 01:16:06 crc kubenswrapper[4829]: I0122 01:16:06.947646 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/086c27b2-d7dd-4b1f-9db0-77f78daf06f1-catalog-content\") pod \"certified-operators-cpwzq\" (UID: 
\"086c27b2-d7dd-4b1f-9db0-77f78daf06f1\") " pod="openshift-marketplace/certified-operators-cpwzq" Jan 22 01:16:06 crc kubenswrapper[4829]: I0122 01:16:06.947689 4829 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/086c27b2-d7dd-4b1f-9db0-77f78daf06f1-utilities\") pod \"certified-operators-cpwzq\" (UID: \"086c27b2-d7dd-4b1f-9db0-77f78daf06f1\") " pod="openshift-marketplace/certified-operators-cpwzq" Jan 22 01:16:06 crc kubenswrapper[4829]: I0122 01:16:06.948572 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/086c27b2-d7dd-4b1f-9db0-77f78daf06f1-utilities\") pod \"certified-operators-cpwzq\" (UID: \"086c27b2-d7dd-4b1f-9db0-77f78daf06f1\") " pod="openshift-marketplace/certified-operators-cpwzq" Jan 22 01:16:06 crc kubenswrapper[4829]: I0122 01:16:06.948809 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/086c27b2-d7dd-4b1f-9db0-77f78daf06f1-catalog-content\") pod \"certified-operators-cpwzq\" (UID: \"086c27b2-d7dd-4b1f-9db0-77f78daf06f1\") " pod="openshift-marketplace/certified-operators-cpwzq" Jan 22 01:16:06 crc kubenswrapper[4829]: I0122 01:16:06.975575 4829 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s928n\" (UniqueName: \"kubernetes.io/projected/086c27b2-d7dd-4b1f-9db0-77f78daf06f1-kube-api-access-s928n\") pod \"certified-operators-cpwzq\" (UID: \"086c27b2-d7dd-4b1f-9db0-77f78daf06f1\") " pod="openshift-marketplace/certified-operators-cpwzq" Jan 22 01:16:06 crc kubenswrapper[4829]: I0122 01:16:06.978232 4829 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cpwzq" Jan 22 01:16:07 crc kubenswrapper[4829]: I0122 01:16:07.292563 4829 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cpwzq"] Jan 22 01:16:07 crc kubenswrapper[4829]: I0122 01:16:07.467529 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cpwzq" event={"ID":"086c27b2-d7dd-4b1f-9db0-77f78daf06f1","Type":"ContainerStarted","Data":"43c6bada4aa7262fbaf52f4fb9e244fc713c5cfd1c984e4184d569a9807320e8"} Jan 22 01:16:08 crc kubenswrapper[4829]: I0122 01:16:08.482349 4829 generic.go:334] "Generic (PLEG): container finished" podID="086c27b2-d7dd-4b1f-9db0-77f78daf06f1" containerID="99f3b501b71f6eff4b0405966a5216e17d8f87297dd40d4d01699c5ee7e959a6" exitCode=0 Jan 22 01:16:08 crc kubenswrapper[4829]: I0122 01:16:08.482704 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cpwzq" event={"ID":"086c27b2-d7dd-4b1f-9db0-77f78daf06f1","Type":"ContainerDied","Data":"99f3b501b71f6eff4b0405966a5216e17d8f87297dd40d4d01699c5ee7e959a6"} Jan 22 01:16:10 crc kubenswrapper[4829]: I0122 01:16:10.503608 4829 generic.go:334] "Generic (PLEG): container finished" podID="086c27b2-d7dd-4b1f-9db0-77f78daf06f1" containerID="397aaf83be1e04421447e36495b8cd7a7420a23fb332a54bd76ce78f534fb657" exitCode=0 Jan 22 01:16:10 crc kubenswrapper[4829]: I0122 01:16:10.503724 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cpwzq" event={"ID":"086c27b2-d7dd-4b1f-9db0-77f78daf06f1","Type":"ContainerDied","Data":"397aaf83be1e04421447e36495b8cd7a7420a23fb332a54bd76ce78f534fb657"} Jan 22 01:16:11 crc kubenswrapper[4829]: I0122 
01:16:11.516218 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cpwzq" event={"ID":"086c27b2-d7dd-4b1f-9db0-77f78daf06f1","Type":"ContainerStarted","Data":"a392841bc78ba072de3fe47cabcccbc066483863e9347d2a7338a7016503c088"} Jan 22 01:16:11 crc kubenswrapper[4829]: I0122 01:16:11.554463 4829 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-cpwzq" podStartSLOduration=3.09790459 podStartE2EDuration="5.554442033s" podCreationTimestamp="2026-01-22 01:16:06 +0000 UTC" firstStartedPulling="2026-01-22 01:16:08.487834649 +0000 UTC m=+4146.524076591" lastFinishedPulling="2026-01-22 01:16:10.944372122 +0000 UTC m=+4148.980614034" observedRunningTime="2026-01-22 01:16:11.542405326 +0000 UTC m=+4149.578647278" watchObservedRunningTime="2026-01-22 01:16:11.554442033 +0000 UTC m=+4149.590683965" Jan 22 01:16:16 crc kubenswrapper[4829]: I0122 01:16:16.978946 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-cpwzq" Jan 22 01:16:16 crc kubenswrapper[4829]: I0122 01:16:16.979528 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-cpwzq" Jan 22 01:16:17 crc kubenswrapper[4829]: I0122 01:16:17.029895 4829 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-cpwzq" Jan 22 01:16:17 crc kubenswrapper[4829]: I0122 01:16:17.637195 4829 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-cpwzq" Jan 22 01:16:17 crc kubenswrapper[4829]: I0122 01:16:17.707377 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cpwzq"] Jan 22 01:16:19 crc kubenswrapper[4829]: I0122 01:16:19.587239 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-cpwzq" podUID="086c27b2-d7dd-4b1f-9db0-77f78daf06f1" containerName="registry-server" containerID="cri-o://a392841bc78ba072de3fe47cabcccbc066483863e9347d2a7338a7016503c088" gracePeriod=2 Jan 22 01:16:20 crc kubenswrapper[4829]: I0122 01:16:20.599959 4829 generic.go:334] "Generic (PLEG): container finished" podID="086c27b2-d7dd-4b1f-9db0-77f78daf06f1" containerID="a392841bc78ba072de3fe47cabcccbc066483863e9347d2a7338a7016503c088" exitCode=0 Jan 22 01:16:20 crc kubenswrapper[4829]: I0122 01:16:20.600020 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cpwzq" event={"ID":"086c27b2-d7dd-4b1f-9db0-77f78daf06f1","Type":"ContainerDied","Data":"a392841bc78ba072de3fe47cabcccbc066483863e9347d2a7338a7016503c088"} Jan 22 01:16:21 crc kubenswrapper[4829]: I0122 01:16:21.093177 4829 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cpwzq" Jan 22 01:16:21 crc kubenswrapper[4829]: I0122 01:16:21.285399 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/086c27b2-d7dd-4b1f-9db0-77f78daf06f1-utilities\") pod \"086c27b2-d7dd-4b1f-9db0-77f78daf06f1\" (UID: \"086c27b2-d7dd-4b1f-9db0-77f78daf06f1\") " Jan 22 01:16:21 crc kubenswrapper[4829]: I0122 01:16:21.285613 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s928n\" (UniqueName: \"kubernetes.io/projected/086c27b2-d7dd-4b1f-9db0-77f78daf06f1-kube-api-access-s928n\") pod \"086c27b2-d7dd-4b1f-9db0-77f78daf06f1\" (UID: \"086c27b2-d7dd-4b1f-9db0-77f78daf06f1\") " Jan 22 01:16:21 crc kubenswrapper[4829]: I0122 01:16:21.286034 4829 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/086c27b2-d7dd-4b1f-9db0-77f78daf06f1-catalog-content\") pod \"086c27b2-d7dd-4b1f-9db0-77f78daf06f1\" (UID: \"086c27b2-d7dd-4b1f-9db0-77f78daf06f1\") " Jan 22 01:16:21 crc kubenswrapper[4829]: I0122 01:16:21.287040 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/086c27b2-d7dd-4b1f-9db0-77f78daf06f1-utilities" (OuterVolumeSpecName: "utilities") pod "086c27b2-d7dd-4b1f-9db0-77f78daf06f1" (UID: "086c27b2-d7dd-4b1f-9db0-77f78daf06f1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 01:16:21 crc kubenswrapper[4829]: I0122 01:16:21.292815 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/086c27b2-d7dd-4b1f-9db0-77f78daf06f1-kube-api-access-s928n" (OuterVolumeSpecName: "kube-api-access-s928n") pod "086c27b2-d7dd-4b1f-9db0-77f78daf06f1" (UID: "086c27b2-d7dd-4b1f-9db0-77f78daf06f1"). InnerVolumeSpecName "kube-api-access-s928n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 01:16:21 crc kubenswrapper[4829]: I0122 01:16:21.351300 4829 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/086c27b2-d7dd-4b1f-9db0-77f78daf06f1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "086c27b2-d7dd-4b1f-9db0-77f78daf06f1" (UID: "086c27b2-d7dd-4b1f-9db0-77f78daf06f1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 01:16:21 crc kubenswrapper[4829]: I0122 01:16:21.388185 4829 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/086c27b2-d7dd-4b1f-9db0-77f78daf06f1-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 01:16:21 crc kubenswrapper[4829]: I0122 01:16:21.388250 4829 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/086c27b2-d7dd-4b1f-9db0-77f78daf06f1-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 01:16:21 crc kubenswrapper[4829]: I0122 01:16:21.388271 4829 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s928n\" (UniqueName: \"kubernetes.io/projected/086c27b2-d7dd-4b1f-9db0-77f78daf06f1-kube-api-access-s928n\") on node \"crc\" DevicePath \"\"" Jan 22 01:16:21 crc kubenswrapper[4829]: I0122 01:16:21.614970 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cpwzq" event={"ID":"086c27b2-d7dd-4b1f-9db0-77f78daf06f1","Type":"ContainerDied","Data":"43c6bada4aa7262fbaf52f4fb9e244fc713c5cfd1c984e4184d569a9807320e8"} Jan 22 01:16:21 crc kubenswrapper[4829]: I0122 01:16:21.615042 4829 scope.go:117] "RemoveContainer" containerID="a392841bc78ba072de3fe47cabcccbc066483863e9347d2a7338a7016503c088" Jan 22 01:16:21 crc kubenswrapper[4829]: I0122 01:16:21.615239 4829 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cpwzq" Jan 22 01:16:21 crc kubenswrapper[4829]: I0122 01:16:21.653481 4829 scope.go:117] "RemoveContainer" containerID="397aaf83be1e04421447e36495b8cd7a7420a23fb332a54bd76ce78f534fb657" Jan 22 01:16:21 crc kubenswrapper[4829]: I0122 01:16:21.688666 4829 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cpwzq"] Jan 22 01:16:21 crc kubenswrapper[4829]: I0122 01:16:21.696224 4829 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-cpwzq"] Jan 22 01:16:21 crc kubenswrapper[4829]: I0122 01:16:21.705831 4829 scope.go:117] "RemoveContainer" containerID="99f3b501b71f6eff4b0405966a5216e17d8f87297dd40d4d01699c5ee7e959a6" Jan 22 01:16:22 crc kubenswrapper[4829]: I0122 01:16:22.567087 4829 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="086c27b2-d7dd-4b1f-9db0-77f78daf06f1" path="/var/lib/kubelet/pods/086c27b2-d7dd-4b1f-9db0-77f78daf06f1/volumes" Jan 22 01:16:34 crc kubenswrapper[4829]: I0122 01:16:34.658451 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 01:16:34 crc kubenswrapper[4829]: I0122 01:16:34.658828 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 01:16:34 crc kubenswrapper[4829]: I0122 01:16:34.658896 4829 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 01:16:34 crc kubenswrapper[4829]: I0122 01:16:34.659729 4829 
kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"610ddacf016a57bc43c153558b651b631d528400356b86a2451f9f8a8ec820fb"} pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 01:16:34 crc kubenswrapper[4829]: I0122 01:16:34.659796 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" containerID="cri-o://610ddacf016a57bc43c153558b651b631d528400356b86a2451f9f8a8ec820fb" gracePeriod=600 Jan 22 01:16:35 crc kubenswrapper[4829]: I0122 01:16:35.752394 4829 generic.go:334] "Generic (PLEG): container finished" podID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerID="610ddacf016a57bc43c153558b651b631d528400356b86a2451f9f8a8ec820fb" exitCode=0 Jan 22 01:16:35 crc kubenswrapper[4829]: I0122 01:16:35.752837 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerDied","Data":"610ddacf016a57bc43c153558b651b631d528400356b86a2451f9f8a8ec820fb"} Jan 22 01:16:35 crc kubenswrapper[4829]: I0122 01:16:35.752880 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerStarted","Data":"bd6cef5552602e772e1335a8ac4f5f384eb544230db53986caf1a48624d9760b"} Jan 22 01:16:35 crc kubenswrapper[4829]: I0122 01:16:35.752907 4829 scope.go:117] "RemoveContainer" containerID="013f3fec027f77945388d7b717d774b4baef68027864e4960f87f3609d8cfdd6" Jan 22 01:19:04 crc kubenswrapper[4829]: I0122 01:19:04.659280 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 01:19:04 crc kubenswrapper[4829]: I0122 01:19:04.659887 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 01:19:34 crc kubenswrapper[4829]: I0122 01:19:34.659109 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 01:19:34 crc kubenswrapper[4829]: I0122 01:19:34.659618 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 01:20:04 crc kubenswrapper[4829]: I0122 01:20:04.659215 4829 patch_prober.go:28] interesting pod/machine-config-daemon-x4jcr container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 01:20:04 crc kubenswrapper[4829]: I0122 01:20:04.660011 4829 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 01:20:04 crc kubenswrapper[4829]: I0122 01:20:04.660084 4829 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" Jan 22 01:20:04 crc kubenswrapper[4829]: I0122 01:20:04.660965 4829 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bd6cef5552602e772e1335a8ac4f5f384eb544230db53986caf1a48624d9760b"} pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 01:20:04 crc kubenswrapper[4829]: I0122 01:20:04.661060 4829 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerName="machine-config-daemon" containerID="cri-o://bd6cef5552602e772e1335a8ac4f5f384eb544230db53986caf1a48624d9760b" gracePeriod=600 Jan 22 01:20:04 crc kubenswrapper[4829]: E0122 01:20:04.729968 4829 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf42b723d_cbe5_4bc3_8b03_f1d30d26c8fc.slice/crio-conmon-bd6cef5552602e772e1335a8ac4f5f384eb544230db53986caf1a48624d9760b.scope\": RecentStats: unable to find data in memory cache]" Jan 22 01:20:04 crc kubenswrapper[4829]: E0122 01:20:04.830958 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:20:05 crc kubenswrapper[4829]: I0122 01:20:05.712977 4829 generic.go:334] "Generic (PLEG): container finished" podID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" containerID="bd6cef5552602e772e1335a8ac4f5f384eb544230db53986caf1a48624d9760b" exitCode=0 Jan 22 01:20:05 crc kubenswrapper[4829]: I0122 01:20:05.713161 4829 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" event={"ID":"f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc","Type":"ContainerDied","Data":"bd6cef5552602e772e1335a8ac4f5f384eb544230db53986caf1a48624d9760b"} Jan 22 01:20:05 crc kubenswrapper[4829]: I0122 01:20:05.713386 4829 scope.go:117] "RemoveContainer" containerID="610ddacf016a57bc43c153558b651b631d528400356b86a2451f9f8a8ec820fb" Jan 22 01:20:05 crc kubenswrapper[4829]: I0122 01:20:05.714304 4829 scope.go:117] "RemoveContainer" containerID="bd6cef5552602e772e1335a8ac4f5f384eb544230db53986caf1a48624d9760b" Jan 22 01:20:05 crc kubenswrapper[4829]: E0122 01:20:05.715031 4829 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:20:20 crc kubenswrapper[4829]: I0122 01:20:20.553957 4829 scope.go:117] "RemoveContainer" containerID="bd6cef5552602e772e1335a8ac4f5f384eb544230db53986caf1a48624d9760b" Jan 22 01:20:20 crc kubenswrapper[4829]: E0122 01:20:20.554884 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:20:31 crc kubenswrapper[4829]: I0122 01:20:31.553725 4829 scope.go:117] "RemoveContainer" containerID="bd6cef5552602e772e1335a8ac4f5f384eb544230db53986caf1a48624d9760b" Jan 22 01:20:31 crc kubenswrapper[4829]: E0122 01:20:31.554666 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:20:44 crc kubenswrapper[4829]: I0122 01:20:44.553532 4829 scope.go:117] "RemoveContainer" containerID="bd6cef5552602e772e1335a8ac4f5f384eb544230db53986caf1a48624d9760b" Jan 22 01:20:44 crc kubenswrapper[4829]: E0122 01:20:44.554557 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" Jan 22 01:20:58 crc kubenswrapper[4829]: I0122 01:20:58.553775 4829 scope.go:117] "RemoveContainer" containerID="bd6cef5552602e772e1335a8ac4f5f384eb544230db53986caf1a48624d9760b" Jan 22 01:20:58 crc kubenswrapper[4829]: E0122 01:20:58.554520 4829 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-x4jcr_openshift-machine-config-operator(f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc)\"" pod="openshift-machine-config-operator/machine-config-daemon-x4jcr" podUID="f42b723d-cbe5-4bc3-8b03-f1d30d26c8fc" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515134275606024456 0ustar coreroot  Om77'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015134275607017374 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015134264527016516 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015134264527015466 5ustar 
corecore